rocm/cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json
IlyasMoutawwakil
HF staff
Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub
150b347
verified
{
    "config": {
        "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        "backend": {
            "name": "pytorch",
            "version": "2.3.1+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "image-classification",
            "library": "transformers",
            "model_type": "vit",
            "model": "google/vit-base-patch16-224",
            "processor": "google/vit-base-patch16-224",
            "device": "cuda",
            "device_ids": "4",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.256576,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 8,
            "gpu_vram_mb": 549621596160,
            "optimum_benchmark_version": "0.4.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.44.2",
            "transformers_commit": null,
            "accelerate_version": "0.34.0",
            "accelerate_commit": null,
            "diffusers_version": "0.30.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.9",
            "timm_commit": null,
            "peft_version": "0.12.0",
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": {
                "unit": "MB",
                "max_ram": 1036.931072,
                "max_global_vram": 68702.69952,
                "max_process_vram": 44530.806784,
                "max_reserved": 400.556032,
                "max_allocated": 346.271744
            },
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 8.09866259765625,
                "mean": 8.09866259765625,
                "stdev": 0.0,
                "p50": 8.09866259765625,
                "p90": 8.09866259765625,
                "p95": 8.09866259765625,
                "p99": 8.09866259765625,
                "values": [
                    8.09866259765625
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null
        },
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1208.87296,
                "max_global_vram": 68702.69952,
                "max_process_vram": 182024.798208,
                "max_reserved": 406.847488,
                "max_allocated": 355.303424
            },
            "latency": {
                "unit": "s",
                "count": 152,
                "total": 0.9954571251869204,
                "mean": 0.006549060034124474,
                "stdev": 0.0003512472554091744,
                "p50": 0.006419256448745728,
                "p90": 0.006899286031723022,
                "p95": 0.006924790430068969,
                "p99": 0.007043945255279542,
                "values": [
                    0.006685894966125488,
                    0.006976453781127929,
                    0.00668285608291626,
                    0.006532296180725098,
                    0.006596295833587646,
                    0.006621254920959473,
                    0.006544456005096435,
                    0.006573575019836426,
                    0.006514535903930664,
                    0.006508615970611573,
                    0.006431015968322754,
                    0.006511496067047119,
                    0.006510536193847657,
                    0.006351175785064697,
                    0.006305897235870361,
                    0.006332616806030273,
                    0.006301736831665039,
                    0.00625581693649292,
                    0.006219656944274903,
                    0.006270216941833496,
                    0.006271176815032959,
                    0.006246857166290283,
                    0.006340297222137451,
                    0.006350057125091553,
                    0.0062937369346618656,
                    0.006278215885162353,
                    0.006279336929321289,
                    0.00626829719543457,
                    0.006330376148223877,
                    0.006289737224578857,
                    0.006313577175140381,
                    0.006387335777282715,
                    0.009122526168823242,
                    0.0067174148559570315,
                    0.006497416019439697,
                    0.0065833358764648435,
                    0.006717255115509033,
                    0.006850214004516602,
                    0.006895975112915039,
                    0.006635974884033203,
                    0.006524295806884766,
                    0.0070033340454101565,
                    0.00685629415512085,
                    0.0068391752243042,
                    0.0069090938568115235,
                    0.0068642950057983395,
                    0.006894053936004639,
                    0.006813254833221436,
                    0.007086214065551758,
                    0.006895974159240723,
                    0.006860614776611328,
                    0.006871335029602051,
                    0.006840775012969971,
                    0.006834535121917724,
                    0.006848295211791992,
                    0.0068495750427246095,
                    0.0068871750831604,
                    0.006859014987945557,
                    0.006860774993896484,
                    0.006900613784790039,
                    0.0068422150611877446,
                    0.0068318147659301755,
                    0.0068495750427246095,
                    0.006884775161743164,
                    0.006870213985443115,
                    0.006891654968261719,
                    0.006487016201019287,
                    0.006343815803527832,
                    0.006278536796569824,
                    0.006243977069854736,
                    0.006359655857086182,
                    0.006324137210845947,
                    0.006271817207336426,
                    0.006293257236480713,
                    0.006289257049560547,
                    0.006263657093048096,
                    0.0062399768829345705,
                    0.006493415832519531,
                    0.0061596579551696775,
                    0.006152457237243653,
                    0.006137416839599609,
                    0.006170536994934082,
                    0.006311336994171143,
                    0.006265256881713867,
                    0.006315336227416992,
                    0.006288136959075928,
                    0.006294857025146485,
                    0.0063215761184692385,
                    0.0064074969291687016,
                    0.006287655830383301,
                    0.006357896804809571,
                    0.006340456008911133,
                    0.006244616985321045,
                    0.0063580570220947265,
                    0.006298696041107178,
                    0.006268937110900879,
                    0.0063226971626281735,
                    0.006280935764312744,
                    0.006243017196655274,
                    0.0063329367637634274,
                    0.006342055797576904,
                    0.006247817039489746,
                    0.006357737064361572,
                    0.006305895805358887,
                    0.006253256797790528,
                    0.006334376811981201,
                    0.0063201370239257815,
                    0.006518375873565674,
                    0.006403017044067383,
                    0.006257576942443848,
                    0.006368455886840821,
                    0.006322856903076172,
                    0.0063790159225463865,
                    0.006318057060241699,
                    0.006304137229919434,
                    0.006344137191772461,
                    0.006140777111053467,
                    0.0061398172378540035,
                    0.006137258052825928,
                    0.0061431779861450195,
                    0.006143657207489014,
                    0.006127336978912354,
                    0.006130057811737061,
                    0.006143657207489014,
                    0.0063338961601257324,
                    0.0061401381492614746,
                    0.006454376220703125,
                    0.006934053897857666,
                    0.006915654182434082,
                    0.006923494815826416,
                    0.0068681340217590335,
                    0.00691997480392456,
                    0.0069391741752624515,
                    0.006926373958587646,
                    0.006901094913482666,
                    0.006825094223022461,
                    0.006936775207519531,
                    0.006920613765716553,
                    0.006879495143890381,
                    0.006899653911590576,
                    0.006866374969482422,
                    0.006881093978881836,
                    0.006864455223083496,
                    0.006855653762817383,
                    0.006851174831390381,
                    0.006854694843292236,
                    0.006851494789123535,
                    0.006888935089111328,
                    0.006859815120697022,
                    0.006850375175476074,
                    0.0068447737693786625,
                    0.006874693870544434
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 152.69366821947102
            },
            "energy": null,
            "efficiency": null
        }
    }
}