{
    "config": {
        "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        "backend": {
            "name": "pytorch",
            "version": "2.3.1+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "image-classification",
            "library": "transformers",
            "model_type": "vit",
            "model": "google/vit-base-patch16-224",
            "processor": "google/vit-base-patch16-224",
            "device": "cuda",
            "device_ids": "5",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 2,
                "sequence_length": 16,
                "num_choices": 2
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082014.490624,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-122-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 8,
            "gpu_vram_mb": 549621596160,
            "optimum_benchmark_version": "0.5.0.dev0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.46.3",
            "transformers_commit": null,
            "accelerate_version": "1.1.1",
            "accelerate_commit": null,
            "diffusers_version": "0.31.0",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.11",
            "timm_commit": null,
            "peft_version": "0.13.2",
            "peft_commit": null
        },
        "print_report": true,
        "log_report": true
    },
    "report": {
        "load_model": {
            "memory": {
                "unit": "MB",
                "max_ram": 1098.932224,
                "max_global_vram": 68702.69952,
                "max_process_vram": 5708.582912,
                "max_reserved": 400.556032,
                "max_allocated": 346.271744
            },
            "latency": {
                "unit": "s",
                "values": [
                    0.13411283874511717
                ],
                "count": 1,
                "total": 0.13411283874511717,
                "mean": 0.13411283874511717,
                "p50": 0.13411283874511717,
                "p90": 0.13411283874511717,
                "p95": 0.13411283874511717,
                "p99": 0.13411283874511717,
                "stdev": 0,
                "stdev_": 0
            },
            "throughput": null,
            "energy": null,
            "efficiency": null
        },
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1269.051392,
                "max_global_vram": 68702.69952,
                "max_process_vram": 179653.2224,
                "max_reserved": 400.556032,
                "max_allocated": 373.34528
            },
            "latency": {
                "unit": "s",
                "values": [
                    0.006492465972900391,
                    0.006262546062469483,
                    0.006309106826782227,
                    0.006304786205291748,
                    0.0062655858993530275,
                    0.006264307022094726,
                    0.006300305843353272,
                    0.006272626876831055,
                    0.006315186977386474,
                    0.006284945964813232,
                    0.0062855868339538575,
                    0.0063103861808776855,
                    0.006275025844573975,
                    0.006288627147674561,
                    0.006292945861816406,
                    0.00628398609161377,
                    0.00628046703338623,
                    0.00630686616897583,
                    0.006311185836791992,
                    0.006274706840515136,
                    0.0062822260856628415,
                    0.0063151860237121585,
                    0.006327667236328125,
                    0.006308306217193604,
                    0.006295186042785644,
                    0.0063089470863342285,
                    0.006313906192779541,
                    0.006353265762329101,
                    0.006330706119537353,
                    0.006369586944580078,
                    0.006368145942687988,
                    0.006362706184387207,
                    0.006370066165924072,
                    0.006304626941680908,
                    0.006346066951751709,
                    0.0063244662284851075,
                    0.00633966588973999,
                    0.006342226982116699,
                    0.00633726692199707,
                    0.006364625930786133,
                    0.006337106227874756,
                    0.006362545967102051,
                    0.006333907127380371,
                    0.006338706970214844,
                    0.006352466106414795,
                    0.006327186107635498,
                    0.006528945922851562,
                    0.006341425895690918,
                    0.006321267127990722,
                    0.0063463859558105466,
                    0.00633742618560791,
                    0.00636366605758667,
                    0.0063351869583129885,
                    0.006326385974884033,
                    0.006314385890960694,
                    0.006325106143951416,
                    0.006349427223205566,
                    0.006357585906982422,
                    0.006341906070709229,
                    0.006360785961151123,
                    0.0063302268981933595,
                    0.007241744041442871,
                    0.007114543914794922,
                    0.0071068649291992185,
                    0.007093104839324951,
                    0.007156943798065186,
                    0.006669106006622315,
                    0.006572784900665284,
                    0.006542545795440674,
                    0.006691346168518067,
                    0.006611985206604004,
                    0.006568146228790283,
                    0.006669745922088623,
                    0.006663825035095215,
                    0.006575185775756836,
                    0.00675902509689331,
                    0.006654866218566894,
                    0.006563186168670654,
                    0.006660464763641357,
                    0.006430706024169922,
                    0.006641265869140625,
                    0.006353265762329101,
                    0.006368465900421142,
                    0.006364945888519287,
                    0.006374547004699707,
                    0.006348466873168946,
                    0.0063628659248352055,
                    0.006317905902862549,
                    0.006359985828399658,
                    0.0063566269874572754,
                    0.006328626155853272,
                    0.006360946178436279,
                    0.006376626014709473,
                    0.006334706783294678,
                    0.006379186153411865,
                    0.006378546237945557,
                    0.006371185779571533,
                    0.006380946159362793,
                    0.006375986099243164,
                    0.006378067016601562,
                    0.006338706016540527,
                    0.006356626033782959,
                    0.006341906070709229,
                    0.006335987091064453,
                    0.006324306964874267,
                    0.00639806604385376,
                    0.006342545986175537,
                    0.006352625846862793,
                    0.006352305889129639,
                    0.006333106994628906,
                    0.006334866046905517,
                    0.0063775858879089355,
                    0.0063483061790466306,
                    0.006348627090454102,
                    0.006357107162475586,
                    0.006346065998077393,
                    0.006361745834350586,
                    0.006352625846862793,
                    0.006350226879119873,
                    0.006363986015319824,
                    0.006334065914154053,
                    0.006366226196289062,
                    0.006354386806488037,
                    0.0063143868446350095,
                    0.0063275060653686524,
                    0.006321425914764404,
                    0.006362226963043213,
                    0.006352305889129639,
                    0.006337106227874756,
                    0.006347186088562011,
                    0.006331986904144287,
                    0.00635038709640503,
                    0.006339826107025146,
                    0.006329745769500733,
                    0.006322707176208496,
                    0.006331185817718506,
                    0.006357746124267578,
                    0.006320946216583252,
                    0.006372467041015625,
                    0.006332306861877441,
                    0.006341586112976074,
                    0.00636206579208374,
                    0.00635214614868164,
                    0.0063518271446228024,
                    0.006361266136169433,
                    0.006662545204162598,
                    0.006384946823120117,
                    0.00632542610168457,
                    0.006323986053466797,
                    0.006325106143951416,
                    0.00631070613861084,
                    0.006335825920104981,
                    0.006338066101074219,
                    0.006369585990905762,
                    0.006337906837463379,
                    0.006299665927886963
                ],
                "count": 156,
                "total": 0.997443925857544,
                "mean": 0.006393871319599641,
                "p50": 0.006346226453781127,
                "p90": 0.00657398533821106,
                "p95": 0.006669265985488892,
                "p99": 0.007133623862266541,
                "stdev": 0.00016606654170160805,
                "stdev_": 2.597276882826076
            },
            "throughput": {
                "unit": "samples/s",
                "value": 312.7995388129319
            },
            "energy": null,
            "efficiency": null
        }
    }
}