rocm/cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json
IlyasMoutawwakil: Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub (commit 27121a2, verified)
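The JSON below is the artifact that optimum-benchmark 0.4.0 writes after a run: a `config` section describing the backend, scenario, launcher, and environment, and a `report` section with the measured memory and latency. As a point of reference, here is a minimal sketch of how a comparable run could be launched from Python; it assumes the `Benchmark`, `BenchmarkConfig`, `PyTorchConfig`, `ProcessConfig`, and `InferenceConfig` entry points documented for that release, with parameter values mirroring the config recorded in this file (treat exact signatures as an assumption, not a verified recipe):

```python
# Sketch only: assumes the optimum-benchmark 0.4.0 Python API; verify the names
# against the installed version before relying on them.
from optimum_benchmark import (
    Benchmark,
    BenchmarkConfig,
    InferenceConfig,
    ProcessConfig,
    PyTorchConfig,
)

if __name__ == "__main__":
    # Process launcher with device isolation, as recorded in the config below.
    launcher_config = ProcessConfig(device_isolation=True, device_isolation_action="error")

    # Inference scenario tracking latency and memory (energy disabled).
    scenario_config = InferenceConfig(
        memory=True,
        latency=True,
        input_shapes={"batch_size": 1, "num_choices": 2, "sequence_length": 2},
    )

    # PyTorch backend on a single ROCm device, with randomly initialized weights
    # (no_weights=True avoids downloading the checkpoint).
    backend_config = PyTorchConfig(
        model="google/vit-base-patch16-224",
        device="cuda",
        device_ids="5",
        no_weights=True,
    )

    benchmark_config = BenchmarkConfig(
        name="cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        launcher=launcher_config,
        scenario=scenario_config,
        backend=backend_config,
    )

    # Runs the benchmark in an isolated process and returns the report
    # (the "report" section of the JSON below).
    benchmark_report = Benchmark.launch(benchmark_config)
    benchmark_report.log()
```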
{
    "config": {
        "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        "backend": {
            "name": "pytorch",
            "version": "2.4.0+rocm6.1",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "image-classification",
            "library": "transformers",
            "model_type": "vit",
            "model": "google/vit-base-patch16-224",
            "processor": "google/vit-base-patch16-224",
            "device": "cuda",
            "device_ids": "5",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.256576,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 8,
            "gpu_vram_mb": 549621596160,
            "optimum_benchmark_version": "0.4.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.44.2",
            "transformers_commit": null,
            "accelerate_version": "0.33.0",
            "accelerate_commit": null,
            "diffusers_version": "0.30.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.9",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": {
                "unit": "MB",
                "max_ram": 1386.999808,
                "max_global_vram": 11.128832,
                "max_process_vram": 0.0,
                "max_reserved": 400.556032,
                "max_allocated": 346.271744
            },
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 7.83645263671875,
                "mean": 7.83645263671875,
                "stdev": 0.0,
                "p50": 7.83645263671875,
                "p90": 7.83645263671875,
                "p95": 7.83645263671875,
                "p99": 7.83645263671875,
                "values": [
                    7.83645263671875
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null
        },
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1557.737472,
                "max_global_vram": 11.38688,
                "max_process_vram": 0.0,
                "max_reserved": 406.847488,
                "max_allocated": 354.740224
            },
            "latency": {
                "unit": "s",
                "count": 160,
                "total": 0.9980249857902529,
                "mean": 0.00623765616118908,
                "stdev": 0.0008198818897457517,
                "p50": 0.006320936918258666,
                "p90": 0.00641356086730957,
                "p95": 0.006483031988143921,
                "p99": 0.006579219245910644,
                "values": [
                    0.005835817813873291,
                    0.005921739101409912,
                    0.005742538928985596,
                    0.005925098896026612,
                    0.005747018814086914,
                    0.005883338928222656,
                    0.005746539115905762,
                    0.005550378799438476,
                    0.005540299892425537,
                    0.00552269983291626,
                    0.005508619785308838,
                    0.005476940155029297,
                    0.005486380100250244,
                    0.005468939781188965,
                    0.005483500003814697,
                    0.00556541919708252,
                    0.0056111798286437985,
                    0.005838698863983154,
                    0.005641418933868408,
                    0.005426380157470703,
                    0.0054177408218383785,
                    0.005445260047912598,
                    0.015368103981018066,
                    0.004619983196258545,
                    0.00463774299621582,
                    0.004622383117675781,
                    0.004615822792053223,
                    0.004839023113250732,
                    0.0058513379096984865,
                    0.005841739177703857,
                    0.005747659206390381,
                    0.005931018829345703,
                    0.0059063777923583985,
                    0.006299497127532959,
                    0.006286376953125,
                    0.006300776958465576,
                    0.006301577091217041,
                    0.006208296775817871,
                    0.006006217956542968,
                    0.006307336807250976,
                    0.00632509708404541,
                    0.00629789686203003,
                    0.006315496921539307,
                    0.006305417060852051,
                    0.0062631769180297855,
                    0.006323816776275634,
                    0.0062802968025207516,
                    0.006306217193603515,
                    0.006310536861419678,
                    0.006294217109680176,
                    0.006300937175750732,
                    0.006295816898345947,
                    0.006290535926818847,
                    0.006279497146606445,
                    0.006315017223358154,
                    0.006290856838226318,
                    0.006287817001342773,
                    0.006270856857299805,
                    0.006312777042388916,
                    0.006329576969146728,
                    0.006462056159973145,
                    0.006315977096557617,
                    0.006300457000732422,
                    0.006359817028045655,
                    0.006301577091217041,
                    0.0063225369453430175,
                    0.006477096080780029,
                    0.0062993369102478025,
                    0.006300776958465576,
                    0.0063225369453430175,
                    0.006295977115631103,
                    0.0062852568626403805,
                    0.006282697200775147,
                    0.006285896778106689,
                    0.006289257049560547,
                    0.0062830171585083,
                    0.006313416004180908,
                    0.006563015937805176,
                    0.006375977039337158,
                    0.006360616207122803,
                    0.006370536804199219,
                    0.0063607759475708,
                    0.0063630170822143554,
                    0.0063273367881774905,
                    0.0063270158767700195,
                    0.006323976993560791,
                    0.006324936866760254,
                    0.006344616889953613,
                    0.006300776958465576,
                    0.006347657203674317,
                    0.006327657222747802,
                    0.006341576099395752,
                    0.006345736980438233,
                    0.006327016830444336,
                    0.00641869592666626,
                    0.006401576042175293,
                    0.006321096897125244,
                    0.0063207769393920895,
                    0.0063084568977355955,
                    0.006310057163238525,
                    0.006299497127532959,
                    0.006326056957244873,
                    0.006344457149505615,
                    0.00631117582321167,
                    0.006333736896514892,
                    0.006319656848907471,
                    0.0063329367637634274,
                    0.0063225369453430175,
                    0.006298057079315186,
                    0.00633549690246582,
                    0.006315176010131836,
                    0.00633277702331543,
                    0.006320296764373779,
                    0.006347177028656006,
                    0.006349896907806397,
                    0.0063678169250488284,
                    0.00634941577911377,
                    0.006319656848907471,
                    0.006344936847686768,
                    0.0063343758583068845,
                    0.006319976806640625,
                    0.006369417190551758,
                    0.006325255870819091,
                    0.006356616973876953,
                    0.006354856967926026,
                    0.006324296951293946,
                    0.0063775768280029295,
                    0.00638477611541748,
                    0.006547976016998291,
                    0.006481895923614502,
                    0.006407017230987549,
                    0.006530695915222168,
                    0.0064591760635375976,
                    0.006364296913146972,
                    0.006360136985778809,
                    0.006413416862487793,
                    0.006538216114044189,
                    0.006504617214202881,
                    0.006403656959533691,
                    0.006413256168365479,
                    0.006602536201477051,
                    0.006390536785125732,
                    0.00636333703994751,
                    0.006394217014312744,
                    0.006563015937805176,
                    0.0063943772315979,
                    0.006382215976715088,
                    0.00636765718460083,
                    0.006420135974884033,
                    0.006342216968536377,
                    0.006421576023101807,
                    0.006384936809539795,
                    0.006407815933227539,
                    0.006383976936340332,
                    0.006364457130432129,
                    0.0063857359886169435,
                    0.006362215995788574,
                    0.0064148569107055665,
                    0.006395336151123047,
                    0.006357896804809571
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 160.31662761760353
            },
            "energy": null,
            "efficiency": null
        }
    }
}
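The forward throughput recorded above is simply the number of measured forward passes divided by their summed latency (160 passes / 0.998 s at batch size 1, about 160.3 samples/s). A small stdlib-only sketch for re-deriving the summary statistics from a local copy of this file (the file path is an assumption):

```python
import json
import statistics

# Assumed local path to a downloaded copy of this artifact.
with open("benchmark.json") as f:
    data = json.load(f)

latency = data["report"]["forward"]["latency"]
values = latency["values"]  # per-forward-pass latencies, in seconds
batch_size = data["config"]["scenario"]["input_shapes"]["batch_size"]

total = sum(values)
mean = statistics.mean(values)
stdev = statistics.stdev(values)
throughput = len(values) * batch_size / total  # samples per second

print(f"count={len(values)} mean={mean:.6f}s stdev={stdev:.6f}s")
print(
    f"throughput={throughput:.2f} samples/s "
    f"(reported: {data['report']['forward']['throughput']['value']:.2f})"
)
```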