rocm / cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json
{
    "config": {
        "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "image-classification",
            "library": "transformers",
            "model": "google/vit-base-patch16-224",
            "processor": "google/vit-base-patch16-224",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.236096,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 68702699520,
            "optimum_benchmark_version": "0.3.0",
            "optimum_benchmark_commit": "57f6495c03ea0fa48e157048c97add150dcd765c",
            "transformers_version": "4.42.3",
            "transformers_commit": null,
            "accelerate_version": "0.31.0",
            "accelerate_commit": null,
            "diffusers_version": "0.29.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.7",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1060.950016,
                "max_global_vram": 777.715712,
                "max_process_vram": 153462.419456,
                "max_reserved": 406.847488,
                "max_allocated": 355.30752
            },
            "latency": {
                "unit": "s",
                "count": 157,
                "total": 0.9960449347496033,
                "mean": 0.006344235253182186,
                "stdev": 0.00023799862506089799,
                "p50": 0.006316534996032715,
                "p90": 0.006584466552734375,
                "p95": 0.0068122709274292,
                "p99": 0.007298714256286621,
                "values": [
                    0.006604529857635498,
                    0.006810926914215088,
                    0.006385654926300049,
                    0.006364375114440918,
                    0.006393973827362061,
                    0.006558452129364014,
                    0.0064131741523742675,
                    0.006363093852996826,
                    0.006336054801940918,
                    0.0067562079429626465,
                    0.006823566913604737,
                    0.006594930171966553,
                    0.006331415176391602,
                    0.006291895866394043,
                    0.0062696561813354494,
                    0.006275735855102539,
                    0.006316534996032715,
                    0.00657749080657959,
                    0.006396853923797607,
                    0.006346775054931641,
                    0.006341495037078857,
                    0.006320855140686035,
                    0.006347095012664795,
                    0.006311254978179932,
                    0.006328855037689209,
                    0.006407894134521484,
                    0.006381653785705567,
                    0.006336215019226074,
                    0.006375574111938477,
                    0.0063514142036437985,
                    0.006307896137237549,
                    0.0063638949394226076,
                    0.006301976203918457,
                    0.006341334819793701,
                    0.006395894050598145,
                    0.006368215084075928,
                    0.006354135036468506,
                    0.006356215000152588,
                    0.0077320318222045895,
                    0.00739395809173584,
                    0.006315575122833252,
                    0.006557651996612549,
                    0.006420053958892822,
                    0.006553010940551758,
                    0.006622129917144775,
                    0.006321815013885498,
                    0.006305655002593994,
                    0.006282296180725098,
                    0.006389654159545898,
                    0.006489653110504151,
                    0.0063168549537658695,
                    0.006340535163879395,
                    0.00643621301651001,
                    0.006164857864379883,
                    0.006167098045349121,
                    0.006188056945800781,
                    0.006576371192932129,
                    0.00640165376663208,
                    0.006211737155914307,
                    0.006381333827972412,
                    0.006350934982299805,
                    0.006353973865509033,
                    0.006277175903320312,
                    0.006437492847442627,
                    0.006599091053009033,
                    0.006487092971801758,
                    0.006484373092651367,
                    0.006501172065734863,
                    0.006426133155822754,
                    0.0068176469802856444,
                    0.006397334098815918,
                    0.006356215000152588,
                    0.006387733936309815,
                    0.0062155771255493165,
                    0.0061830968856811525,
                    0.006205976963043213,
                    0.006160217761993408,
                    0.006193817138671875,
                    0.006169178009033203,
                    0.0061878981590270995,
                    0.006444693088531494,
                    0.006185176849365234,
                    0.0061707768440246585,
                    0.00620773696899414,
                    0.006362295150756836,
                    0.00616661787033081,
                    0.0061902980804443355,
                    0.006193817138671875,
                    0.006189177036285401,
                    0.006326295852661133,
                    0.006164378166198731,
                    0.006210776805877685,
                    0.006213016986846924,
                    0.006193656921386719,
                    0.006198777198791504,
                    0.006305975914001465,
                    0.006167257785797119,
                    0.006171577930450439,
                    0.006181657791137695,
                    0.006164857864379883,
                    0.0061669378280639646,
                    0.006127577781677246,
                    0.0061459789276123045,
                    0.006094939231872558,
                    0.006120378017425537,
                    0.006720528125762939,
                    0.0072238798141479495,
                    0.006440214157104492,
                    0.006404533863067627,
                    0.006395413875579834,
                    0.006501973152160645,
                    0.006329174995422363,
                    0.006521172046661377,
                    0.006365653991699219,
                    0.0061738181114196775,
                    0.00612949800491333,
                    0.00607558012008667,
                    0.006089018821716309,
                    0.006093658924102783,
                    0.006101658821105957,
                    0.0060589399337768555,
                    0.0060958991050720215,
                    0.00610693883895874,
                    0.006125178813934326,
                    0.006129179000854492,
                    0.00611893892288208,
                    0.006137018203735352,
                    0.006197017192840576,
                    0.006176698207855225,
                    0.006172217845916748,
                    0.006315735816955567,
                    0.0068741259574890134,
                    0.00691092586517334,
                    0.006855885982513427,
                    0.006781806945800781,
                    0.006358774185180664,
                    0.006523091793060303,
                    0.006464373111724853,
                    0.00643669319152832,
                    0.006287896156311036,
                    0.006211896896362304,
                    0.006144699096679688,
                    0.006194296836853027,
                    0.0061904568672180174,
                    0.006238776206970215,
                    0.006114778995513916,
                    0.00621157693862915,
                    0.006137338161468506,
                    0.006202298164367676,
                    0.006143578052520752,
                    0.006147898197174072,
                    0.006244697093963623,
                    0.0061995768547058105,
                    0.006169497013092041,
                    0.006200216770172119,
                    0.006155257225036621,
                    0.0061168580055236815
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 157.62341087500073
            },
            "energy": null,
            "efficiency": null
        }
    }
}
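
For readers who want to sanity-check the summary above, the sketch below reloads the report with Python's standard library and recomputes the mean, standard deviation, and throughput from the raw "values" array. This is a minimal sketch, not part of optimum-benchmark itself: the local path "benchmark.json" is an assumption, and statistics.stdev (the sample standard deviation) may differ marginally from the reported figure depending on how the tool aggregates.

import json
import statistics

# Load the benchmark report shown above.
# "benchmark.json" is a placeholder path; point it at the downloaded file.
with open("benchmark.json") as f:
    benchmark = json.load(f)

latency = benchmark["report"]["forward"]["latency"]
values = latency["values"]  # per-forward-pass latencies, in seconds

# Recompute the summary statistics from the raw samples.
total = sum(values)
mean = total / len(values)
stdev = statistics.stdev(values)

# With batch_size 1, throughput reduces to forward calls per second
# of measured latency: count / total.
throughput = len(values) / total

print(f"count      : {len(values)}")
print(f"mean (s)   : {mean:.9f}  (reported: {latency['mean']:.9f})")
print(f"stdev (s)  : {stdev:.9f}  (reported: {latency['stdev']:.9f})")
print(f"throughput : {throughput:.2f} samples/s "
      f"(reported: {benchmark['report']['forward']['throughput']['value']:.2f})")

On this report, count / total gives roughly 157.62 samples/s, matching the stored throughput value; the percentile fields (p50, p90, p95, p99) are left to the tool, since the interpolation method it uses is not spelled out in the file.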