rocm/cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json
{
    "config": {
        "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        "backend": {
            "name": "pytorch",
            "version": "2.3.1+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "image-classification",
            "library": "transformers",
            "model_type": "vit",
            "model": "google/vit-base-patch16-224",
            "processor": "google/vit-base-patch16-224",
            "device": "cuda",
            "device_ids": "5",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.256576,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 8,
            "gpu_vram_mb": 549621596160,
            "optimum_benchmark_version": "0.4.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.44.2",
            "transformers_commit": null,
            "accelerate_version": "0.34.2",
            "accelerate_commit": null,
            "diffusers_version": "0.30.3",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.9",
            "timm_commit": null,
            "peft_version": "0.12.0",
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": {
                "unit": "MB",
                "max_ram": 1074.049024,
                "max_global_vram": 68702.69952,
                "max_process_vram": 47376.666624,
                "max_reserved": 400.556032,
                "max_allocated": 346.271744
            },
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 8.281189453125,
                "mean": 8.281189453125,
                "stdev": 0.0,
                "p50": 8.281189453125,
                "p90": 8.281189453125,
                "p95": 8.281189453125,
                "p99": 8.281189453125,
                "values": [
                    8.281189453125
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null
        },
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1244.471296,
                "max_global_vram": 68702.69952,
                "max_process_vram": 188712.579072,
                "max_reserved": 406.847488,
                "max_allocated": 355.303424
            },
            "latency": {
                "unit": "s",
                "count": 161,
                "total": 0.9964290826320651,
                "mean": 0.0061890005132426386,
                "stdev": 0.0010288657493425033,
                "p50": 0.006036145210266113,
                "p90": 0.006516143798828125,
                "p95": 0.006758863925933838,
                "p99": 0.009536616516113287,
                "values": [
                    0.00650926399230957,
                    0.00640702486038208,
                    0.006365903854370117,
                    0.006579984188079834,
                    0.006585103988647461,
                    0.006371984958648682,
                    0.006391983985900879,
                    0.0061796650886535645,
                    0.006163984775543213,
                    0.006135984897613526,
                    0.006105585098266602,
                    0.006060625076293945,
                    0.005963665008544922,
                    0.0059502248764038085,
                    0.005911026000976562,
                    0.005913744926452637,
                    0.005871826171875,
                    0.00586606502532959,
                    0.005814226150512696,
                    0.005828306198120117,
                    0.005836784839630127,
                    0.0058388662338256835,
                    0.005826866149902344,
                    0.0058292651176452635,
                    0.0058319859504699706,
                    0.005796465873718262,
                    0.005829905033111572,
                    0.005813906192779541,
                    0.005827985763549805,
                    0.005810544967651367,
                    0.005825265884399414,
                    0.0058081459999084475,
                    0.009197736740112305,
                    0.006134705066680908,
                    0.005979344844818115,
                    0.005974065780639648,
                    0.005991984844207763,
                    0.005954705238342285,
                    0.016875320434570313,
                    0.004004149913787842,
                    0.003996629953384399,
                    0.004011350154876709,
                    0.0040003108978271485,
                    0.004335508823394775,
                    0.005948945045471192,
                    0.005973105907440186,
                    0.005983185768127441,
                    0.005957904815673828,
                    0.005970864772796631,
                    0.0059715061187744145,
                    0.005991825103759766,
                    0.0062169451713562016,
                    0.005987505912780761,
                    0.005999024868011475,
                    0.005981744766235352,
                    0.005995345115661621,
                    0.006027825832366943,
                    0.005965906143188477,
                    0.005985905170440673,
                    0.00598558521270752,
                    0.00598110580444336,
                    0.005968625068664551,
                    0.006172784805297852,
                    0.005989904880523682,
                    0.00599166488647461,
                    0.005995824813842774,
                    0.005993745803833008,
                    0.006019666194915772,
                    0.005983184814453125,
                    0.006038865089416504,
                    0.006038704872131348,
                    0.006030065059661865,
                    0.006045585155487061,
                    0.006021426200866699,
                    0.00601870584487915,
                    0.0060236649513244625,
                    0.006249423980712891,
                    0.006066065788269043,
                    0.006064626216888427,
                    0.006050546169281006,
                    0.006066225051879883,
                    0.006035025119781494,
                    0.006017264842987061,
                    0.006032304763793945,
                    0.0060334248542785645,
                    0.006036145210266113,
                    0.006062864780426025,
                    0.006031025886535644,
                    0.006040785789489746,
                    0.006007345199584961,
                    0.006059024810791016,
                    0.006011505126953125,
                    0.006045424938201904,
                    0.0060108652114868166,
                    0.006011664867401123,
                    0.006011506080627441,
                    0.006038545131683349,
                    0.005996944904327392,
                    0.006043344974517822,
                    0.006005905151367188,
                    0.0060303850173950195,
                    0.006015666007995605,
                    0.006015026092529297,
                    0.006056465148925781,
                    0.005997265815734863,
                    0.006025424957275391,
                    0.00603742504119873,
                    0.006023825168609619,
                    0.0060236649513244625,
                    0.006038384914398193,
                    0.010044936180114747,
                    0.006339503765106201,
                    0.006032304763793945,
                    0.006181904792785645,
                    0.006247185230255127,
                    0.006444785118103027,
                    0.006297584056854248,
                    0.006355504035949707,
                    0.006314384937286377,
                    0.006268464088439942,
                    0.006419185161590576,
                    0.0061772651672363284,
                    0.0063998250961303715,
                    0.006291985034942627,
                    0.006302063941955567,
                    0.006375024795532227,
                    0.006205103874206543,
                    0.006422224998474121,
                    0.006292623996734619,
                    0.006306065082550049,
                    0.006361905097961426,
                    0.006275343894958496,
                    0.006418224811553955,
                    0.006014706134796143,
                    0.005984785079956055,
                    0.006023184776306152,
                    0.0059948649406433106,
                    0.006516143798828125,
                    0.0067267041206359865,
                    0.00671582317352295,
                    0.006780303001403808,
                    0.006650063991546631,
                    0.006385744094848633,
                    0.006263504981994629,
                    0.006339663982391357,
                    0.00635182523727417,
                    0.006239824771881104,
                    0.006396463871002197,
                    0.0063791851997375484,
                    0.006863022804260254,
                    0.006811023235321045,
                    0.006758862972259521,
                    0.0068419032096862795,
                    0.006758863925933838,
                    0.006793103218078613,
                    0.006649104118347168,
                    0.006414225101470947,
                    0.006398384094238281,
                    0.006291823863983154,
                    0.00629198408126831,
                    0.00616462516784668
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 161.57697803713128
            },
            "energy": null,
            "efficiency": null
        }
    }
}
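
Below is a minimal sketch of how the forward-pass numbers in this report can be read back programmatically with only the Python standard library. It assumes the JSON above has been saved locally as benchmark.json (the filename is an assumption, not part of the report); all key names are taken directly from the report structure shown above.

import json

# Load the optimum-benchmark report shown above.
# Assumption: it was saved locally as "benchmark.json".
with open("benchmark.json") as f:
    benchmark = json.load(f)

forward = benchmark["report"]["forward"]
latency = forward["latency"]   # per-call forward latencies, in seconds
memory = forward["memory"]     # peak memory figures, in MB

print(f"forward calls measured: {latency['count']}")
print(f"mean latency: {latency['mean'] * 1e3:.3f} ms")
print(f"p50 / p90 / p99: {latency['p50'] * 1e3:.3f} / {latency['p90'] * 1e3:.3f} / {latency['p99'] * 1e3:.3f} ms")
print(f"throughput: {forward['throughput']['value']:.2f} {forward['throughput']['unit']}")
print(f"max allocated VRAM: {memory['max_allocated']} {memory['unit']}")

For this file, that prints a mean forward latency of about 6.19 ms and a throughput of roughly 161.6 samples/s, matching the aggregate fields in the report.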