rocm / cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json
IlyasMoutawwakil: Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub (verified, commit 0243f67)
{
    "config": {
        "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        "backend": {
            "name": "pytorch",
            "version": "2.4.1+rocm6.1",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "image-classification",
            "library": "transformers",
            "model_type": "vit",
            "model": "google/vit-base-patch16-224",
            "processor": "google/vit-base-patch16-224",
            "device": "cuda",
            "device_ids": "4",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.256576,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 8,
            "gpu_vram_mb": 549621596160,
            "optimum_benchmark_version": "0.4.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.44.2",
            "transformers_commit": null,
            "accelerate_version": "0.34.0",
            "accelerate_commit": null,
            "diffusers_version": "0.30.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.9",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
"report": { | |
"load": { | |
"memory": { | |
"unit": "MB", | |
"max_ram": 1385.541632, | |
"max_global_vram": 68702.69952, | |
"max_process_vram": 0.0, | |
"max_reserved": 400.556032, | |
"max_allocated": 346.271744 | |
}, | |
"latency": { | |
"unit": "s", | |
"count": 1, | |
"total": 7.69084716796875, | |
"mean": 7.69084716796875, | |
"stdev": 0.0, | |
"p50": 7.69084716796875, | |
"p90": 7.69084716796875, | |
"p95": 7.69084716796875, | |
"p99": 7.69084716796875, | |
"values": [ | |
7.69084716796875 | |
] | |
}, | |
"throughput": null, | |
"energy": null, | |
"efficiency": null | |
}, | |
"forward": { | |
"memory": { | |
"unit": "MB", | |
"max_ram": 1556.6848, | |
"max_global_vram": 68702.69952, | |
"max_process_vram": 0.0, | |
"max_reserved": 406.847488, | |
"max_allocated": 354.740224 | |
}, | |
"latency": { | |
"unit": "s", | |
"count": 176, | |
"total": 0.9945492057800291, | |
"mean": 0.005650847760113802, | |
"stdev": 0.0002186643981938099, | |
"p50": 0.005551263570785522, | |
"p90": 0.00594414210319519, | |
"p95": 0.006152581691741943, | |
"p99": 0.006318581819534302, | |
"values": [ | |
0.0061263818740844725, | |
0.006006221771240234, | |
0.005968622207641602, | |
0.0057971029281616215, | |
0.005802543163299561, | |
0.005821742057800293, | |
0.005943982124328613, | |
0.005953903198242187, | |
0.005940302848815918, | |
0.005911342144012451, | |
0.00594910192489624, | |
0.005900302886962891, | |
0.005846063137054444, | |
0.005835503101348877, | |
0.00581022310256958, | |
0.005860143184661866, | |
0.0058564629554748535, | |
0.0058191828727722165, | |
0.005804623126983642, | |
0.00580590295791626, | |
0.005721902847290039, | |
0.005692782878875732, | |
0.005656144142150879, | |
0.0056620631217956545, | |
0.005669582843780518, | |
0.005671503067016602, | |
0.005661423206329346, | |
0.005680462837219238, | |
0.005642862796783448, | |
0.005653742790222168, | |
0.005658223152160645, | |
0.005629743099212646, | |
0.00564158296585083, | |
0.005631342887878418, | |
0.005670543193817139, | |
0.005727343082427979, | |
0.00572238302230835, | |
0.006842060089111328, | |
0.00572670316696167, | |
0.005485904216766357, | |
0.005500144004821778, | |
0.005515822887420654, | |
0.005532944202423095, | |
0.0056625428199768065, | |
0.005944302082061767, | |
0.005847981929779053, | |
0.0057212629318237306, | |
0.005509583950042724, | |
0.005515343189239502, | |
0.005511184215545654, | |
0.005568943023681641, | |
0.00555422306060791, | |
0.005512303829193115, | |
0.0055275030136108395, | |
0.005540944099426269, | |
0.005524622917175293, | |
0.005504144191741943, | |
0.0055359830856323245, | |
0.005505743980407715, | |
0.005550063133239746, | |
0.005552624225616455, | |
0.005525743961334229, | |
0.005543982982635498, | |
0.005550864219665527, | |
0.0055611028671264644, | |
0.005533103942871094, | |
0.005500304222106934, | |
0.00552558422088623, | |
0.0055297441482543945, | |
0.0055220627784729, | |
0.005504784107208252, | |
0.0055097432136535646, | |
0.005517583847045898, | |
0.005524143218994141, | |
0.005548943996429443, | |
0.005533583164215088, | |
0.005511824131011963, | |
0.0057519831657409665, | |
0.005569583892822265, | |
0.0055396628379821775, | |
0.005542704105377197, | |
0.005542063236236573, | |
0.0055537428855895994, | |
0.005545104026794433, | |
0.005546702861785889, | |
0.005542384147644043, | |
0.005551342964172363, | |
0.00555646276473999, | |
0.005528463840484619, | |
0.005554062843322754, | |
0.005516784191131592, | |
0.005771182060241699, | |
0.005521903991699219, | |
0.005516462802886963, | |
0.0055419039726257325, | |
0.005529582977294922, | |
0.0055364642143249515, | |
0.005524783134460449, | |
0.00551310396194458, | |
0.005543023109436035, | |
0.005521743774414062, | |
0.005521903991699219, | |
0.005544783115386963, | |
0.005537424087524414, | |
0.00551166296005249, | |
0.0055364642143249515, | |
0.005526382923126221, | |
0.0055511841773986815, | |
0.0055302238464355466, | |
0.005552143096923828, | |
0.005554704189300537, | |
0.005511823177337646, | |
0.005557583808898926, | |
0.005525262832641602, | |
0.005534383773803711, | |
0.0055443038940429686, | |
0.005536942958831787, | |
0.005586703777313232, | |
0.005595343112945557, | |
0.005571662902832031, | |
0.005569583892822265, | |
0.005555024147033691, | |
0.00557662296295166, | |
0.0055631837844848635, | |
0.005573904037475586, | |
0.005570542812347412, | |
0.005539184093475342, | |
0.005524464130401611, | |
0.005515984058380127, | |
0.005479663848876953, | |
0.005504464149475098, | |
0.005506543159484864, | |
0.005549424171447754, | |
0.0054918241500854495, | |
0.00548798418045044, | |
0.005509263038635254, | |
0.005463344097137451, | |
0.005460622787475586, | |
0.005465904235839844, | |
0.005486224174499512, | |
0.005509102821350097, | |
0.005496463775634766, | |
0.005510383129119873, | |
0.005542064189910888, | |
0.0055268630981445315, | |
0.005554382801055908, | |
0.005540463924407959, | |
0.005566382884979248, | |
0.0055467038154602055, | |
0.0055630230903625484, | |
0.0055463829040527345, | |
0.005530543804168701, | |
0.0055580630302429195, | |
0.005565904140472412, | |
0.005533584117889404, | |
0.005539342880249023, | |
0.005561903953552246, | |
0.005528462886810303, | |
0.00557390308380127, | |
0.005532303810119629, | |
0.005564463138580322, | |
0.005540944099426269, | |
0.005586543083190918, | |
0.006371020793914795, | |
0.006290541172027588, | |
0.006282861232757568, | |
0.006231181144714356, | |
0.006301102161407471, | |
0.006282701015472412, | |
0.0062966208457946775, | |
0.006299981117248535, | |
0.0059465417861938475, | |
0.00603758192062378, | |
0.005860302925109863, | |
0.006098540782928466, | |
0.005856943130493164 | |
] | |
            },
            "throughput": {
                "unit": "samples/s",
                "value": 176.96459760576897
            },
            "energy": null,
            "efficiency": null
        }
    }
}
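
For context, here is a minimal sketch of how a configuration like the one recorded above could be assembled and launched with the optimum-benchmark Python API (the environment section reports optimum_benchmark_version 0.4.0). The class and method names (PyTorchConfig, InferenceConfig, ProcessConfig, BenchmarkConfig, Benchmark.launch, save_json) follow the optimum-benchmark README, and the keyword arguments mirror the config fields above, but the exact signatures in this release are an assumption; this is not the script that produced the file.

```python
# Minimal sketch, assuming the optimum-benchmark (v0.4.x) Python API as
# described in its README; keyword names are copied from the config block
# above, exact signatures/defaults are not verified here.
from optimum_benchmark import (
    Benchmark,
    BenchmarkConfig,
    InferenceConfig,
    ProcessConfig,
    PyTorchConfig,
)
from optimum_benchmark.logging_utils import setup_logging

if __name__ == "__main__":
    setup_logging(level="INFO")

    # Backend: PyTorch (2.4.1+rocm6.1 in the report), ViT image classifier,
    # randomly initialized weights ("no_weights": true) on device index 4.
    backend_config = PyTorchConfig(
        model="google/vit-base-patch16-224",
        device="cuda",
        device_ids="4",
        no_weights=True,
    )

    # Scenario: track latency and memory for the forward pass.
    scenario_config = InferenceConfig(
        memory=True,
        latency=True,
        input_shapes={"batch_size": 1, "sequence_length": 2},
    )

    # Launcher: spawn an isolated process and error out if another process
    # touches the selected device during the run.
    launcher_config = ProcessConfig(
        device_isolation=True,
        device_isolation_action="error",
    )

    benchmark_config = BenchmarkConfig(
        name="cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        backend=backend_config,
        scenario=scenario_config,
        launcher=launcher_config,
    )

    # Run the benchmark, then bundle config + report, matching the two
    # top-level keys ("config", "report") of the JSON above.
    benchmark_report = Benchmark.launch(benchmark_config)
    benchmark = Benchmark(config=benchmark_config, report=benchmark_report)
    benchmark.save_json("benchmark.json")
```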
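
As a quick sanity check on the forward section, the summary statistics can be re-derived from the raw latency values: the mean is total/count, and at batch_size 1 the reported throughput is simply count/total (176 / 0.9945 s ≈ 176.96 samples/s). The sketch below uses only the standard library; the local file name benchmark.json is an assumption.

```python
# Re-derive the forward-pass summary statistics from the raw values above.
# Only the standard library is used; "benchmark.json" is an assumed local path.
import json
import statistics

with open("benchmark.json") as f:
    forward = json.load(f)["report"]["forward"]

latency = forward["latency"]
values = latency["values"]

mean = sum(values) / len(values)        # should match latency["mean"]
throughput = len(values) / sum(values)  # samples/s, since batch_size == 1

print(f"count      : {len(values)} (reported {latency['count']})")
print(f"mean       : {mean:.9f} s (reported {latency['mean']:.9f} s)")
# The report's stdev convention (population vs. sample) isn't stated in the
# file; the population value is printed for comparison.
print(f"stdev      : {statistics.pstdev(values):.9f} s (reported {latency['stdev']:.9f} s)")
print(f"throughput : {throughput:.2f} samples/s (reported {forward['throughput']['value']:.2f})")
```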