{
    "config": {
        "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        "backend": {
            "name": "pytorch",
            "version": "2.3.1+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "image-classification",
            "library": "transformers",
            "model_type": "vit",
            "model": "google/vit-base-patch16-224",
            "processor": "google/vit-base-patch16-224",
            "device": "cuda",
            "device_ids": "5",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
"scenario": { | |
"name": "inference", | |
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario", | |
"iterations": 1, | |
"duration": 1, | |
"warmup_runs": 1, | |
"input_shapes": { | |
"batch_size": 1, | |
"num_choices": 2, | |
"sequence_length": 2 | |
}, | |
"new_tokens": null, | |
"memory": true, | |
"latency": true, | |
"energy": false, | |
"forward_kwargs": {}, | |
"generate_kwargs": { | |
"max_new_tokens": 2, | |
"min_new_tokens": 2 | |
}, | |
"call_kwargs": { | |
"num_inference_steps": 2 | |
} | |
}, | |
"launcher": { | |
"name": "process", | |
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher", | |
"device_isolation": true, | |
"device_isolation_action": "warn", | |
"numactl": false, | |
"numactl_kwargs": {}, | |
"start_method": "spawn" | |
}, | |
"environment": { | |
"cpu": " AMD EPYC 7763 64-Core Processor", | |
"cpu_count": 128, | |
"cpu_ram_mb": 1082015.256576, | |
"system": "Linux", | |
"machine": "x86_64", | |
"platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35", | |
"processor": "x86_64", | |
"python_version": "3.10.12", | |
"gpu": [ | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]" | |
], | |
"gpu_count": 8, | |
"gpu_vram_mb": 549621596160, | |
"optimum_benchmark_version": "0.4.0", | |
"optimum_benchmark_commit": null, | |
"transformers_version": "4.44.2", | |
"transformers_commit": null, | |
"accelerate_version": "0.34.2", | |
"accelerate_commit": null, | |
"diffusers_version": "0.30.3", | |
"diffusers_commit": null, | |
"optimum_version": null, | |
"optimum_commit": null, | |
"timm_version": "1.0.9", | |
"timm_commit": null, | |
"peft_version": "0.12.0", | |
"peft_commit": null | |
} | |
}, | |
"report": { | |
"load": { | |
"memory": { | |
"unit": "MB", | |
"max_ram": 1037.058048, | |
"max_global_vram": 68702.69952, | |
"max_process_vram": 47097.667584, | |
"max_reserved": 400.556032, | |
"max_allocated": 346.271744 | |
}, | |
"latency": { | |
"unit": "s", | |
"count": 1, | |
"total": 8.18219580078125, | |
"mean": 8.18219580078125, | |
"stdev": 0.0, | |
"p50": 8.18219580078125, | |
"p90": 8.18219580078125, | |
"p95": 8.18219580078125, | |
"p99": 8.18219580078125, | |
"values": [ | |
8.18219580078125 | |
] | |
}, | |
"throughput": null, | |
"energy": null, | |
"efficiency": null | |
}, | |
"forward": { | |
"memory": { | |
"unit": "MB", | |
"max_ram": 1208.909824, | |
"max_global_vram": 68702.69952, | |
"max_process_vram": 166874.435584, | |
"max_reserved": 406.847488, | |
"max_allocated": 355.303424 | |
}, | |
"latency": { | |
"unit": "s", | |
"count": 154, | |
"total": 0.9963955373764037, | |
"mean": 0.00647010089205457, | |
"stdev": 0.00043413202673694393, | |
"p50": 0.006378705024719239, | |
"p90": 0.006885503816604614, | |
"p95": 0.007071967029571533, | |
"p99": 0.008255520877838133, | |
"values": [ | |
                    0.006935023784637452,
                    0.006840942859649658,
                    0.006826543807983398,
                    0.007098382949829102,
                    0.007226862907409668,
                    0.007407341957092285,
                    0.007362223148345947,
                    0.007262702941894531,
                    0.0070577430725097656,
                    0.006645584106445312,
                    0.006674543857574463,
                    0.006631984233856201,
                    0.006570864200592041,
                    0.006720943927764893,
                    0.006701903820037842,
                    0.007040782928466797,
                    0.0067905430793762205,
                    0.006728623867034912,
                    0.006671343803405762,
                    0.006481424808502197,
                    0.006581425189971924,
                    0.006573103904724121,
                    0.006515823841094971,
                    0.006642384052276611,
                    0.006625904083251953,
                    0.006491505146026612,
                    0.00665102481842041,
                    0.00652110481262207,
                    0.006339984893798828,
                    0.0092119779586792,
                    0.00672270393371582,
                    0.00941197681427002,
                    0.006302384853363037,
                    0.006188145160675049,
                    0.006322384834289551,
                    0.006153264999389649,
                    0.0061515049934387205,
                    0.006102066040039062,
                    0.0061655850410461425,
                    0.006125585079193115,
                    0.006066384792327881,
                    0.0061417450904846195,
                    0.006394225120544434,
                    0.006160624980926514,
                    0.006115025043487549,
                    0.006339505195617676,
                    0.006098704814910889,
                    0.0063454251289367675,
                    0.006194544792175293,
                    0.006184784889221191,
                    0.006202225208282471,
                    0.006192945003509522,
                    0.0063644652366638185,
                    0.0062531051635742185,
                    0.006207664966583252,
                    0.006213584899902344,
                    0.0061863851547241215,
                    0.00653326416015625,
                    0.006283665180206299,
                    0.006419664859771729,
                    0.0062068657875061035,
                    0.006209586143493652,
                    0.006363183975219727,
                    0.006753583908081054,
                    0.0065828638076782225,
                    0.006351665019989013,
                    0.006494383811950683,
                    0.006406384944915772,
                    0.00640334415435791,
                    0.006518704891204834,
                    0.006336463928222656,
                    0.0063718252182006834,
                    0.006376945018768311,
                    0.0063606247901916504,
                    0.006197104930877686,
                    0.006206705093383789,
                    0.006321744918823242,
                    0.0066153440475463865,
                    0.0061612648963928224,
                    0.00622414493560791,
                    0.006197905063629151,
                    0.0062059049606323245,
                    0.006210545063018799,
                    0.006192144870758057,
                    0.006211184978485108,
                    0.006337265014648438,
                    0.0065449438095092774,
                    0.006502864837646484,
                    0.006548463821411133,
                    0.0065297441482543945,
                    0.006585423946380615,
                    0.0065283050537109375,
                    0.006461744785308838,
                    0.006380465030670166,
                    0.006547345161437988,
                    0.006366864204406738,
                    0.006673423767089843,
                    0.006559343814849854,
                    0.00639230489730835,
                    0.006510384082794189,
                    0.006571984767913818,
                    0.006387825012207031,
                    0.006549903869628906,
                    0.006501904964447022,
                    0.006420945167541504,
                    0.006347185134887696,
                    0.006213426113128662,
                    0.006198386192321778,
                    0.00618318510055542,
                    0.006354544162750244,
                    0.006171824932098388,
                    0.006185266017913818,
                    0.006201265811920166,
                    0.006159345149993897,
                    0.006174705028533936,
                    0.006190064907073974,
                    0.0061703848838806155,
                    0.006188944816589355,
                    0.006145585060119629,
                    0.006120145797729492,
                    0.006109745979309082,
                    0.006139025211334228,
                    0.006107985019683838,
                    0.006105906009674073,
                    0.006154386043548584,
                    0.006112784862518311,
                    0.006114224910736084,
                    0.006078705787658691,
                    0.006123025894165039,
                    0.006116785049438476,
                    0.006112784862518311,
                    0.006419825077056885,
                    0.00690798282623291,
                    0.006889584064483642,
                    0.00689566421508789,
                    0.006945743083953858,
                    0.0069139041900634765,
                    0.006351025104522705,
                    0.006541264057159424,
                    0.006585905075073242,
                    0.006463345050811768,
                    0.006543663978576661,
                    0.007103823184967041,
                    0.006875983238220215,
                    0.006730544090270996,
                    0.006619984149932861,
                    0.006555503845214844,
                    0.006508464813232422,
                    0.0065063838958740235,
                    0.006450705051422119,
                    0.006162385940551757,
                    0.006117104053497314,
                    0.0061319851875305175,
                    0.006139184951782227
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 154.55709527312356
            },
            "energy": null,
            "efficiency": null
        }
    }
}
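
The "config" block above is the serialized form of an optimum-benchmark run definition (PyTorch backend, inference scenario, process launcher). As a rough guide only, the sketch below shows how a comparable run could be defined and launched through the library's Python API; the class and field names follow optimum-benchmark 0.4.0 as documented in its README, but the exact signatures and defaults are an assumption to verify against the library, and only the key fields from the config above are mapped.

```python
# Sketch only: assumes the optimum-benchmark 0.4.0 Python API (Benchmark, BenchmarkConfig,
# PyTorchConfig, InferenceConfig, ProcessConfig) as described in the project's README.
# Only the key fields from the "config" section above are reproduced here.
from optimum_benchmark import (
    Benchmark,
    BenchmarkConfig,
    InferenceConfig,
    ProcessConfig,
    PyTorchConfig,
)

if __name__ == "__main__":  # needed with the "spawn" start method used by the process launcher
    config = BenchmarkConfig(
        name="cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        backend=PyTorchConfig(
            model="google/vit-base-patch16-224",
            device="cuda",
            device_ids="5",
            no_weights=True,  # benchmark randomly initialized weights, as in this report
        ),
        scenario=InferenceConfig(memory=True, latency=True),
        launcher=ProcessConfig(device_isolation=True, device_isolation_action="warn"),
    )
    report = Benchmark.launch(config)  # runs the load and forward measurements
    report.log()                       # prints a report structured like the one above
```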
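
For readers consuming this file programmatically, the stdlib-only sketch below recomputes the forward-pass summary from the raw latency samples above; it assumes the report has been downloaded locally as benchmark.json (a hypothetical path). The reported throughput is simply count × batch_size / total latency, i.e. 154 × 1 / ≈0.996 s ≈ 154.56 samples/s.

```python
# Sketch only: recompute the "forward" summary from the raw latency values in this report.
# Assumes the file has been saved locally as benchmark.json (hypothetical path).
import json
import statistics

with open("benchmark.json") as f:
    benchmark = json.load(f)

forward = benchmark["report"]["forward"]
latencies = forward["latency"]["values"]  # per-forward-pass latencies, in seconds
batch_size = benchmark["config"]["scenario"]["input_shapes"]["batch_size"]  # 1 in this run

mean_latency = statistics.mean(latencies)              # ~0.00647 s per forward pass
total_time = sum(latencies)                            # ~0.996 s over 154 passes
throughput = len(latencies) * batch_size / total_time  # ~154.56 samples/s

print(f"count      : {len(latencies)}")
print(f"mean (s)   : {mean_latency:.6f}")
print(f"throughput : {throughput:.2f} samples/s (reported: {forward['throughput']['value']:.2f})")
```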