rocm/cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json
IlyasMoutawwakil
HF staff
Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub
a7d305f
verified
{
    "config": {
        "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        "backend": {
            "name": "pytorch",
            "version": "2.3.1+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "image-classification",
            "library": "transformers",
            "model_type": "vit",
            "model": "google/vit-base-patch16-224",
            "processor": "google/vit-base-patch16-224",
            "device": "cuda",
            "device_ids": "6",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.256576,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 8,
            "gpu_vram_mb": 549621596160,
            "optimum_benchmark_version": "0.4.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.44.2",
            "transformers_commit": null,
            "accelerate_version": "0.34.2",
            "accelerate_commit": null,
            "diffusers_version": "0.30.3",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.9",
            "timm_commit": null,
            "peft_version": "0.12.0",
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": {
                "unit": "MB",
                "max_ram": 1071.992832,
                "max_global_vram": 68702.69952,
                "max_process_vram": 46683.734016,
                "max_reserved": 400.556032,
                "max_allocated": 346.271744
            },
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 8.39650390625,
                "mean": 8.39650390625,
                "stdev": 0.0,
                "p50": 8.39650390625,
                "p90": 8.39650390625,
                "p95": 8.39650390625,
                "p99": 8.39650390625,
                "values": [
                    8.39650390625
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null
        },
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1243.93472,
                "max_global_vram": 68702.69952,
                "max_process_vram": 183186.755584,
                "max_reserved": 406.847488,
                "max_allocated": 355.303424
            },
            "latency": {
                "unit": "s",
                "count": 160,
                "total": 0.995445397377014,
                "mean": 0.0062215337336063384,
                "stdev": 0.000456799279503595,
                "p50": 0.006153687477111817,
                "p90": 0.0062033679008483885,
                "p95": 0.006309408044815062,
                "p99": 0.007674444518089288,
                "values": [
                    0.006554567813873291,
                    0.006126088142395019,
                    0.011216334342956543,
                    0.006950088024139404,
                    0.006150887012481689,
                    0.006138408184051513,
                    0.00614160680770874,
                    0.0061848068237304685,
                    0.006188007831573486,
                    0.006156328201293945,
                    0.006231367111206055,
                    0.006134568214416504,
                    0.006122568130493164,
                    0.006139206886291504,
                    0.006131688117980957,
                    0.006096807956695557,
                    0.006105126857757569,
                    0.006082888126373291,
                    0.006099846839904785,
                    0.006132647037506104,
                    0.00608784818649292,
                    0.006148966789245605,
                    0.006138887882232666,
                    0.006104648113250732,
                    0.006110406875610351,
                    0.006118408203125,
                    0.00614080810546875,
                    0.0061163268089294434,
                    0.006138247966766357,
                    0.006141448020935058,
                    0.006926887989044189,
                    0.008716811180114746,
                    0.006113128185272217,
                    0.0060870471000671385,
                    0.00614304780960083,
                    0.006109288215637207,
                    0.006120007038116455,
                    0.006142248153686523,
                    0.0061352081298828125,
                    0.006122726917266846,
                    0.006092967033386231,
                    0.00613808822631836,
                    0.006129127025604248,
                    0.006122568130493164,
                    0.006121287822723389,
                    0.006160646915435791,
                    0.006182248115539551,
                    0.006515528202056885,
                    0.0062777681350708004,
                    0.006202407836914062,
                    0.006168168067932129,
                    0.006188967227935791,
                    0.00618560791015625,
                    0.006170087814331054,
                    0.006160646915435791,
                    0.006169928073883057,
                    0.006185448169708252,
                    0.006127847194671631,
                    0.006215047836303711,
                    0.006144008159637451,
                    0.006183688163757324,
                    0.006162086963653565,
                    0.006143847942352295,
                    0.006172647953033447,
                    0.00620000696182251,
                    0.00617136812210083,
                    0.00620960807800293,
                    0.006161927223205566,
                    0.0061961669921875,
                    0.006180808067321777,
                    0.006182087898254395,
                    0.006207686901092529,
                    0.006180168151855469,
                    0.0061540880203247075,
                    0.006151047229766846,
                    0.0061828880310058594,
                    0.006577767848968506,
                    0.006237927913665771,
                    0.006202888011932373,
                    0.006183207988739014,
                    0.0061769671440124515,
                    0.006187528133392334,
                    0.006158247947692871,
                    0.006222086906433106,
                    0.006179526805877685,
                    0.006180647850036621,
                    0.006195847988128662,
                    0.006126087188720703,
                    0.0061846480369567875,
                    0.00614896821975708,
                    0.006166566848754882,
                    0.006139207839965821,
                    0.006143847942352295,
                    0.006152647018432617,
                    0.006149447917938232,
                    0.006172167778015137,
                    0.006152966976165772,
                    0.006158247947692871,
                    0.0061974477767944335,
                    0.0061657681465148925,
                    0.006182086944580078,
                    0.006156167984008789,
                    0.006108168125152588,
                    0.006149607181549072,
                    0.006115528106689453,
                    0.00614848804473877,
                    0.006157927036285401,
                    0.0061633682250976565,
                    0.006200007915496826,
                    0.006160967826843262,
                    0.006148326873779297,
                    0.006150887966156006,
                    0.00617952823638916,
                    0.00616176700592041,
                    0.006172967910766602,
                    0.006152488231658935,
                    0.00616176700592041,
                    0.006148808002471924,
                    0.00615504789352417,
                    0.006157126903533936,
                    0.006153927803039551,
                    0.0061590480804443355,
                    0.006144166946411133,
                    0.006137127876281738,
                    0.006136328220367432,
                    0.006153447151184082,
                    0.006161287784576416,
                    0.00614080810546875,
                    0.00616944694519043,
                    0.006139368057250977,
                    0.006161128044128418,
                    0.006137287139892578,
                    0.006169767856597901,
                    0.00615552806854248,
                    0.006175848007202148,
                    0.006164166927337646,
                    0.006139207839965821,
                    0.00612224817276001,
                    0.0061323270797729494,
                    0.006131368160247803,
                    0.006143846988677979,
                    0.006138566970825195,
                    0.006106728076934814,
                    0.006144967079162598,
                    0.00612064790725708,
                    0.006141128063201905,
                    0.0061468868255615235,
                    0.006141128063201905,
                    0.0063846478462219235,
                    0.006305448055267334,
                    0.006152008056640625,
                    0.006185447216033936,
                    0.006142087936401367,
                    0.006147367000579834,
                    0.006136487007141114,
                    0.006116007804870606,
                    0.006156167984008789,
                    0.006183846950531006,
                    0.006145287990570068,
                    0.006186408042907715
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 160.7320707108576
            },
            "energy": null,
            "efficiency": null
        }
    }
}