rocm / cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json (uploaded by IlyasMoutawwakil via huggingface_hub, commit d134148)
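This file is a report produced by optimum-benchmark 0.2.1 (see the `environment` block below) for google/vit-base-patch16-224 inference on an AMD GPU through PyTorch's ROCm build. A comparable report could, in principle, be regenerated with the library's Python API; the sketch below assumes the 0.2.x interface (`Benchmark`, `BenchmarkConfig`, `PyTorchConfig`, `InferenceConfig`, `ProcessConfig`) and simply mirrors the fields of the `config` section, so exact argument names and defaults should be checked against that release:

```python
# Sketch of regenerating a comparable report with optimum-benchmark's Python API.
# Assumes the 0.2.x interface; keyword arguments below mirror the JSON config and
# are not guaranteed to match the release exactly.
from optimum_benchmark import (
    Benchmark,
    BenchmarkConfig,
    InferenceConfig,
    ProcessConfig,
    PyTorchConfig,
)

if __name__ == "__main__":  # the process launcher with "spawn" re-imports __main__
    backend_config = PyTorchConfig(
        model="google/vit-base-patch16-224",
        task="image-classification",
        device="cuda",      # ROCm devices are exposed through the CUDA device type
        device_ids="0",
        no_weights=True,    # benchmark randomly initialized weights, no download
    )
    scenario_config = InferenceConfig(
        latency=True,
        memory=True,
        input_shapes={"batch_size": 1},
    )
    launcher_config = ProcessConfig(
        device_isolation=True,
        device_isolation_action="error",
        start_method="spawn",
    )
    benchmark_config = BenchmarkConfig(
        name="cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        backend=backend_config,
        scenario=scenario_config,
        launcher=launcher_config,
    )
    report = Benchmark.launch(benchmark_config)
    report.save_json("benchmark.json")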
{
    "config": {
        "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "image-classification",
            "library": "transformers",
            "model": "google/vit-base-patch16-224",
            "processor": "google/vit-base-patch16-224",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.236096,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 68702699520,
            "optimum_benchmark_version": "0.2.1",
            "optimum_benchmark_commit": "9a8b29987613b8b04c221447a49b37ee314548ff",
            "transformers_version": "4.42.3",
            "transformers_commit": null,
            "accelerate_version": "0.31.0",
            "accelerate_commit": null,
            "diffusers_version": "0.29.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.7",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1061.072896,
                "max_global_vram": 778.256384,
                "max_process_vram": 157937.39776,
                "max_reserved": 406.847488,
                "max_allocated": 355.30752
            },
            "latency": {
                "unit": "s",
                "count": 155,
                "total": 0.9973683018684386,
                "mean": 0.00643463420560283,
                "stdev": 0.000273631736181437,
                "p50": 0.006365777015686035,
                "p90": 0.006790834712982178,
                "p95": 0.006931091022491455,
                "p99": 0.007284669666290288,
                "values": [
                    0.0076096210479736325,
                    0.006539857864379882,
                    0.006474256992340088,
                    0.006242416858673096,
                    0.006233457088470459,
                    0.006221617221832275,
                    0.006234577178955078,
                    0.006222736835479736,
                    0.006232656955718994,
                    0.0061974568367004395,
                    0.006202897071838379,
                    0.00648209810256958,
                    0.006195536136627197,
                    0.0061347360610961915,
                    0.006178256988525391,
                    0.006188336849212646,
                    0.006192337036132813,
                    0.006147537231445312,
                    0.006533777236938477,
                    0.006214736938476562,
                    0.006198737144470215,
                    0.006183856964111328,
                    0.006210096836090088,
                    0.006320656776428223,
                    0.006163376808166504,
                    0.006172657012939453,
                    0.0062347369194030765,
                    0.0062366571426391605,
                    0.006262257099151612,
                    0.006270417213439941,
                    0.00621217679977417,
                    0.006214416980743408,
                    0.006184657096862793,
                    0.006230737209320068,
                    0.0062262568473815914,
                    0.006211377143859863,
                    0.006247537136077881,
                    0.006281776905059815,
                    0.006212337017059326,
                    0.006293937206268311,
                    0.006172815799713135,
                    0.006263856887817383,
                    0.006356976985931397,
                    0.006348337173461914,
                    0.0063057770729064945,
                    0.006280497074127197,
                    0.0063038568496704105,
                    0.006226097106933594,
                    0.006200336933135987,
                    0.006484976768493652,
                    0.0069304189682006835,
                    0.006583377838134765,
                    0.006428177833557129,
                    0.0064934568405151365,
                    0.006510577201843262,
                    0.006560818195343017,
                    0.006446417808532715,
                    0.006762897968292237,
                    0.006534897804260254,
                    0.006570737838745117,
                    0.006467856884002685,
                    0.006724818229675293,
                    0.006608017921447754,
                    0.006499378204345703,
                    0.006496177196502685,
                    0.006604177951812744,
                    0.006440338134765625,
                    0.006489937782287598,
                    0.0063417768478393555,
                    0.006328176975250244,
                    0.0063185768127441404,
                    0.006152496814727783,
                    0.006365777015686035,
                    0.006397936820983887,
                    0.006254577159881592,
                    0.006341136932373047,
                    0.006171856880187988,
                    0.006478737831115722,
                    0.0062824168205261235,
                    0.006185777187347412,
                    0.006308496952056885,
                    0.006642578125,
                    0.0068784189224243165,
                    0.00693441915512085,
                    0.006416656970977783,
                    0.0061369771957397465,
                    0.006152816772460937,
                    0.006137775897979736,
                    0.006341617107391357,
                    0.006174417018890381,
                    0.0061604962348937985,
                    0.006144657135009766,
                    0.006173296928405762,
                    0.0061376171112060545,
                    0.006195217132568359,
                    0.006275537014007568,
                    0.0065862579345703125,
                    0.006944018840789795,
                    0.006939857959747314,
                    0.0069600181579589845,
                    0.006587377071380615,
                    0.006570736885070801,
                    0.006588657855987549,
                    0.006905619144439697,
                    0.0068444981575012205,
                    0.007007859230041504,
                    0.007816340923309326,
                    0.006590258121490478,
                    0.006720017910003662,
                    0.006606897830963135,
                    0.006495697975158692,
                    0.006528977870941162,
                    0.006494097232818604,
                    0.006494418144226074,
                    0.006542257785797119,
                    0.006195056915283203,
                    0.006182096958160401,
                    0.006190896034240723,
                    0.006238895893096924,
                    0.00637265682220459,
                    0.006195376873016358,
                    0.0062329769134521484,
                    0.006236816883087158,
                    0.006227857112884522,
                    0.0061804971694946285,
                    0.006248336791992187,
                    0.006651857852935791,
                    0.006414896965026855,
                    0.006416818141937256,
                    0.006405617237091064,
                    0.006886579036712646,
                    0.00679249906539917,
                    0.006911538124084473,
                    0.006653937816619873,
                    0.006780337810516357,
                    0.006546257972717285,
                    0.006619217872619629,
                    0.00663009786605835,
                    0.006932659149169922,
                    0.006922099113464355,
                    0.006445937156677246,
                    0.006782419204711914,
                    0.00656513786315918,
                    0.006473456859588623,
                    0.006212656974792481,
                    0.006254897117614746,
                    0.006441137790679932,
                    0.006642097949981689,
                    0.006433457851409912,
                    0.006788338184356689,
                    0.006758418083190918,
                    0.006438416957855225,
                    0.006567536830902099,
                    0.00666769790649414,
                    0.006433137893676758
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 155.4089895474198
            },
            "energy": null,
            "efficiency": null
        }
    }
}
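For a quick sanity check, the summary statistics in the `latency` block can be recomputed from the raw `values` with nothing but the Python standard library. This is a minimal sketch (the file path is illustrative, and optimum-benchmark computes its own aggregates, so estimator details such as sample vs. population stdev or percentile interpolation may differ slightly from a stdlib recomputation):

```python
import json
from statistics import mean, stdev

# Load the report shown above (path is illustrative).
with open("benchmark.json") as f:
    data = json.load(f)

forward = data["report"]["forward"]
values = forward["latency"]["values"]  # per-forward-pass latencies in seconds

total = sum(values)
print(f"count      : {len(values)}")
print(f"total (s)  : {total:.6f}")
print(f"mean (s)   : {mean(values):.6f}")
# Note: statistics.stdev is the sample estimator; the value stored in the
# report may use a different estimator, so expect small differences.
print(f"stdev (s)  : {stdev(values):.6f}")

# Assumed throughput definition: samples processed per second of measured
# forward time, i.e. count * batch_size / total latency (batch_size is 1 here).
batch_size = data["config"]["scenario"]["input_shapes"]["batch_size"]
print(f"throughput : {len(values) * batch_size / total:.2f} samples/s")
```

With a batch size of 1 this reduces to count / total, i.e. 155 / 0.99737 s ≈ 155.41 samples/s, which matches the `throughput` value stored in the report.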