rocm/cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json
Uploaded by IlyasMoutawwakil (HF staff) via huggingface_hub — commit 8bdf579 (verified)
{
  "config": {
    "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
    "backend": {
      "name": "pytorch",
      "version": "2.2.0.dev20231010+rocm5.7",
      "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
      "task": "image-classification",
      "library": "transformers",
      "model_type": "vit",
      "model": "google/vit-base-patch16-224",
      "processor": "google/vit-base-patch16-224",
      "device": "cuda",
      "device_ids": "6",
      "seed": 42,
      "inter_op_num_threads": null,
      "intra_op_num_threads": null,
      "model_kwargs": {},
      "processor_kwargs": {},
      "no_weights": true,
      "device_map": null,
      "torch_dtype": null,
      "eval_mode": true,
      "to_bettertransformer": false,
      "low_cpu_mem_usage": null,
      "attn_implementation": null,
      "cache_implementation": null,
      "autocast_enabled": false,
      "autocast_dtype": null,
      "torch_compile": false,
      "torch_compile_target": "forward",
      "torch_compile_config": {},
      "quantization_scheme": null,
      "quantization_config": {},
      "deepspeed_inference": false,
      "deepspeed_inference_config": {},
      "peft_type": null,
      "peft_config": {}
    },
    "scenario": {
      "name": "inference",
      "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
      "iterations": 1,
      "duration": 1,
      "warmup_runs": 1,
      "input_shapes": {
        "batch_size": 1,
        "num_choices": 2,
        "sequence_length": 2
      },
      "new_tokens": null,
      "memory": true,
      "latency": true,
      "energy": false,
      "forward_kwargs": {},
      "generate_kwargs": {
        "max_new_tokens": 2,
        "min_new_tokens": 2
      },
      "call_kwargs": {
        "num_inference_steps": 2
      }
    },
    "launcher": {
      "name": "process",
      "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
      "device_isolation": true,
      "device_isolation_action": "warn",
      "numactl": false,
      "numactl_kwargs": {},
      "start_method": "spawn"
    },
    "environment": {
      "cpu": " AMD EPYC 7763 64-Core Processor",
      "cpu_count": 128,
      "cpu_ram_mb": 1082015.256576,
      "system": "Linux",
      "machine": "x86_64",
      "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
      "processor": "x86_64",
      "python_version": "3.10.12",
      "gpu": [
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]"
      ],
      "gpu_count": 8,
      "gpu_vram_mb": 549621596160,
      "optimum_benchmark_version": "0.4.0",
      "optimum_benchmark_commit": null,
      "transformers_version": "4.44.2",
      "transformers_commit": null,
      "accelerate_version": "0.34.2",
      "accelerate_commit": null,
      "diffusers_version": "0.30.3",
      "diffusers_commit": null,
      "optimum_version": null,
      "optimum_commit": null,
      "timm_version": "1.0.9",
      "timm_commit": null,
      "peft_version": "0.12.0",
      "peft_commit": null
    }
  },
  "report": {
    "load": {
      "memory": {
        "unit": "MB",
        "max_ram": 947.253248,
        "max_global_vram": 68702.69952,
        "max_process_vram": 45910.536192,
        "max_reserved": 400.556032,
        "max_allocated": 346.271744
      },
      "latency": {
        "unit": "s",
        "count": 1,
        "total": 8.03431005859375,
        "mean": 8.03431005859375,
        "stdev": 0.0,
        "p50": 8.03431005859375,
        "p90": 8.03431005859375,
        "p95": 8.03431005859375,
        "p99": 8.03431005859375,
        "values": [
          8.03431005859375
        ]
      },
      "throughput": null,
      "energy": null,
      "efficiency": null
    },
    "forward": {
      "memory": {
        "unit": "MB",
        "max_ram": 1119.326208,
        "max_global_vram": 68702.69952,
        "max_process_vram": 178943.561728,
        "max_reserved": 406.847488,
        "max_allocated": 355.303424
      },
      "latency": {
        "unit": "s",
        "count": 162,
        "total": 0.997132691383362,
        "mean": 0.006155140070267665,
        "stdev": 0.00031447925029985414,
        "p50": 0.00598904800415039,
        "p90": 0.006611928796768188,
        "p95": 0.006633265066146851,
        "p99": 0.0067156409454345685,
        "values": [
          0.0063369679450988765,
          0.006231688976287842,
          0.0061521677970886235,
          0.006261288166046142,
          0.006341448783874512,
          0.006278088092803955,
          0.00631264877319336,
          0.006290408134460449,
          0.006295369148254394,
          0.006232168197631836,
          0.006191688060760498,
          0.006133448123931884,
          0.006166247844696045,
          0.006129928112030029,
          0.006074567794799804,
          0.0060923280715942385,
          0.006039527893066407,
          0.006034088134765625,
          0.006002088069915771,
          0.006033449172973633,
          0.005986887931823731,
          0.005998568058013916,
          0.0059761681556701664,
          0.0059926481246948245,
          0.0060000081062316895,
          0.006048488140106201,
          0.006002088069915771,
          0.005991208076477051,
          0.0059432082176208495,
          0.006005928039550781,
          0.005976967811584473,
          0.00597264814376831,
          0.005950088024139404,
          0.005966567993164062,
          0.007174569129943848,
          0.006127528190612793,
          0.005911686897277832,
          0.005898727893829346,
          0.005881927967071533,
          0.00587664794921875,
          0.0058628878593444825,
          0.005886407852172851,
          0.005920968055725098,
          0.005889448165893554,
          0.005888967990875244,
          0.005896487236022949,
          0.005914728164672852,
          0.0059075279235839846,
          0.005885767936706543,
          0.005888487815856934,
          0.005910247802734375,
          0.005918248176574707,
          0.005937928199768066,
          0.00591920804977417,
          0.005953127861022949,
          0.005939208030700684,
          0.0059220871925354,
          0.005921926975250244,
          0.00612976884841919,
          0.00656112813949585,
          0.006629768848419189,
          0.006529288768768311,
          0.006614889144897461,
          0.006644329071044922,
          0.00659584903717041,
          0.006614247798919678,
          0.006639049053192138,
          0.006648969173431396,
          0.00661776876449585,
          0.006622408866882324,
          0.006657608985900879,
          0.006552329063415527,
          0.006172647953033447,
          0.006159687995910644,
          0.006199048042297363,
          0.0059131278991699215,
          0.006149289131164551,
          0.005901766777038574,
          0.005916647911071778,
          0.005894887924194336,
          0.005917128086090088,
          0.005908008098602295,
          0.005888008117675782,
          0.0059032077789306645,
          0.00588480806350708,
          0.005889448165893554,
          0.005891366958618164,
          0.005871047973632812,
          0.005904327869415283,
          0.005868328094482422,
          0.005882408142089844,
          0.005885767936706543,
          0.0059008078575134276,
          0.005879208087921143,
          0.005913287162780762,
          0.00590880823135376,
          0.005896008014678955,
          0.005869287967681885,
          0.005876808166503906,
          0.005867527961730957,
          0.005880008220672608,
          0.005876008033752441,
          0.005866726875305176,
          0.005847847938537597,
          0.005893767833709717,
          0.0059001679420471196,
          0.0058838481903076174,
          0.005903687953948975,
          0.005888967990875244,
          0.0058795280456542965,
          0.005864167213439941,
          0.005868008136749267,
          0.005849448204040528,
          0.005875527858734131,
          0.005871047973632812,
          0.005865128040313721,
          0.005871367931365967,
          0.005885127067565918,
          0.005860328197479248,
          0.005887368202209473,
          0.0058630480766296384,
          0.005895847797393799,
          0.005863687992095947,
          0.005852807998657226,
          0.005878087043762207,
          0.005878407955169678,
          0.005876808166503906,
          0.005882408142089844,
          0.006233767986297607,
          0.006612168788909912,
          0.006604969024658203,
          0.006640328884124756,
          0.0066334490776062014,
          0.006639369010925293,
          0.0068064088821411135,
          0.006617128849029541,
          0.006540808200836181,
          0.006555849075317382,
          0.006524168968200684,
          0.0065502490997314455,
          0.00660640811920166,
          0.006553927898406983,
          0.00654512882232666,
          0.006576649188995361,
          0.006537929058074952,
          0.006575528144836426,
          0.006606409072875977,
          0.006592648983001709,
          0.006566888809204101,
          0.006596649169921875,
          0.0065571279525756835,
          0.006609768867492676,
          0.006606089115142822,
          0.006627849102020264,
          0.0066003289222717285,
          0.006592809200286865,
          0.006553607940673828,
          0.006592809200286865,
          0.006592488765716552,
          0.0065675277709960934,
          0.0065721688270568845,
          0.006588168144226074
        ]
      },
      "throughput": {
        "unit": "samples/s",
        "value": 162.46583970208718
      },
      "energy": null,
      "efficiency": null
    }
  }
}