rocm / cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json
{
    "config": {
        "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        "backend": {
            "name": "pytorch",
            "version": "2.3.1+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "image-classification",
            "library": "transformers",
            "model_type": "vit",
            "model": "google/vit-base-patch16-224",
            "processor": "google/vit-base-patch16-224",
            "device": "cuda",
            "device_ids": "4",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
"scenario": { | |
"name": "inference", | |
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario", | |
"iterations": 1, | |
"duration": 1, | |
"warmup_runs": 1, | |
"input_shapes": { | |
"batch_size": 1, | |
"num_choices": 2, | |
"sequence_length": 2 | |
}, | |
"new_tokens": null, | |
"memory": true, | |
"latency": true, | |
"energy": false, | |
"forward_kwargs": {}, | |
"generate_kwargs": { | |
"max_new_tokens": 2, | |
"min_new_tokens": 2 | |
}, | |
"call_kwargs": { | |
"num_inference_steps": 2 | |
} | |
}, | |
"launcher": { | |
"name": "process", | |
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher", | |
"device_isolation": true, | |
"device_isolation_action": "warn", | |
"numactl": false, | |
"numactl_kwargs": {}, | |
"start_method": "spawn" | |
}, | |
"environment": { | |
"cpu": " AMD EPYC 7763 64-Core Processor", | |
"cpu_count": 128, | |
"cpu_ram_mb": 1082015.256576, | |
"system": "Linux", | |
"machine": "x86_64", | |
"platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35", | |
"processor": "x86_64", | |
"python_version": "3.10.12", | |
"gpu": [ | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]" | |
], | |
"gpu_count": 8, | |
"gpu_vram_mb": 549621596160, | |
"optimum_benchmark_version": "0.4.0", | |
"optimum_benchmark_commit": null, | |
"transformers_version": "4.44.2", | |
"transformers_commit": null, | |
"accelerate_version": "0.34.2", | |
"accelerate_commit": null, | |
"diffusers_version": "0.30.3", | |
"diffusers_commit": null, | |
"optimum_version": null, | |
"optimum_commit": null, | |
"timm_version": "1.0.9", | |
"timm_commit": null, | |
"peft_version": "0.12.0", | |
"peft_commit": null | |
} | |
}, | |
"report": { | |
"load": { | |
"memory": { | |
"unit": "MB", | |
"max_ram": 1073.78688, | |
"max_global_vram": 68702.69952, | |
"max_process_vram": 46984.11008, | |
"max_reserved": 400.556032, | |
"max_allocated": 346.271744 | |
}, | |
"latency": { | |
"unit": "s", | |
"count": 1, | |
"total": 8.863576171875, | |
"mean": 8.863576171875, | |
"stdev": 0.0, | |
"p50": 8.863576171875, | |
"p90": 8.863576171875, | |
"p95": 8.863576171875, | |
"p99": 8.863576171875, | |
"values": [ | |
8.863576171875 | |
] | |
}, | |
"throughput": null, | |
"energy": null, | |
"efficiency": null | |
}, | |
"forward": { | |
"memory": { | |
"unit": "MB", | |
"max_ram": 1244.135424, | |
"max_global_vram": 68702.69952, | |
"max_process_vram": 187569.926144, | |
"max_reserved": 406.847488, | |
"max_allocated": 355.303424 | |
}, | |
"latency": { | |
"unit": "s", | |
"count": 163, | |
"total": 0.9991826479434969, | |
"mean": 0.0061299548953588755, | |
"stdev": 0.0007998408717607258, | |
"p50": 0.0060190119743347165, | |
"p90": 0.0066532501220703125, | |
"p95": 0.0066956017494201655, | |
"p99": 0.00674781229019165, | |
"values": [ | |
0.006368930816650391, | |
0.006285572052001953, | |
0.015021531105041504, | |
0.004002061843872071, | |
0.003999982118606567, | |
0.0042954998016357425, | |
0.006396611213684082, | |
0.006238050937652588, | |
0.0062794919013977055, | |
0.006008771896362305, | |
0.0061914920806884766, | |
0.0060190119743347165, | |
0.006079173088073731, | |
0.0057524538040161135, | |
0.005740454196929931, | |
0.00575805377960205, | |
0.005755653858184814, | |
0.005722854137420654, | |
0.005733414173126221, | |
0.0059246129989624025, | |
0.005991971969604492, | |
0.005996452808380127, | |
0.005987011909484864, | |
0.006244451999664307, | |
0.006079812049865723, | |
0.005957413196563721, | |
0.006091813087463379, | |
0.005965732097625733, | |
0.0061095719337463375, | |
0.005927972793579102, | |
0.005947652816772461, | |
0.0057335739135742185, | |
0.005738214015960693, | |
0.005738214015960693, | |
0.006523489952087402, | |
0.006456931114196777, | |
0.006050692081451416, | |
0.006008452892303466, | |
0.006039651870727539, | |
0.0060361318588256834, | |
0.006125892162322998, | |
0.006332770824432373, | |
0.00604397201538086, | |
0.006150052070617675, | |
0.00604013204574585, | |
0.006025572776794433, | |
0.006308610916137695, | |
0.00611597204208374, | |
0.006071812152862549, | |
0.006067492008209228, | |
0.0060782132148742675, | |
0.006063652992248535, | |
0.0060526118278503415, | |
0.0060361318588256834, | |
0.006015973091125488, | |
0.0060098919868469235, | |
0.006015333175659179, | |
0.005982532978057862, | |
0.006010532855987549, | |
0.005997572898864746, | |
0.0060782132148742675, | |
0.006487649917602539, | |
0.006063491821289063, | |
0.006066372871398926, | |
0.0060335731506347655, | |
0.0060166120529174805, | |
0.006012773036956787, | |
0.006205571174621582, | |
0.006538051128387451, | |
0.0064950108528137205, | |
0.006451489925384521, | |
0.006356131076812744, | |
0.006102851867675781, | |
0.006495009899139404, | |
0.006088932991027832, | |
0.006228611946105957, | |
0.006439651012420654, | |
0.00619677209854126, | |
0.00626397180557251, | |
0.006157092094421387, | |
0.006352931022644043, | |
0.006207332134246826, | |
0.0060308518409729, | |
0.005863653182983398, | |
0.005853253841400147, | |
0.005857734203338623, | |
0.0058537330627441405, | |
0.005845572948455811, | |
0.005913252830505371, | |
0.006078532218933105, | |
0.005858693122863769, | |
0.005868614196777344, | |
0.006103652954101563, | |
0.005876453876495361, | |
0.005855972766876221, | |
0.005848453044891357, | |
0.0058817329406738285, | |
0.005847493171691895, | |
0.0058910131454467775, | |
0.0058836531639099125, | |
0.005864294052124024, | |
0.0058423738479614256, | |
0.005885572910308838, | |
0.005856452941894531, | |
0.00584717321395874, | |
0.005885252952575684, | |
0.005881252765655517, | |
0.005893733024597168, | |
0.005889412879943848, | |
0.005862052917480469, | |
0.005902692794799804, | |
0.005876452922821045, | |
0.005925093173980713, | |
0.005864452838897705, | |
0.0058769330978393555, | |
0.005855174064636231, | |
0.005887333869934082, | |
0.005858373165130615, | |
0.005874853134155273, | |
0.005897253036499023, | |
0.005861732959747314, | |
0.00585709285736084, | |
0.0059178929328918455, | |
0.005922213077545166, | |
0.005925892829895019, | |
0.006088292121887207, | |
0.005911013126373291, | |
0.005916933059692383, | |
0.005959332942962647, | |
0.005925093173980713, | |
0.005930373191833496, | |
0.006015492916107178, | |
0.005890532970428467, | |
0.0059503731727600095, | |
0.005926053047180176, | |
0.005914854049682617, | |
0.005909412860870361, | |
0.0062441320419311525, | |
0.006693728923797607, | |
0.006654850006103516, | |
0.006729249000549316, | |
0.006682848930358887, | |
0.006360291004180908, | |
0.006318850994110108, | |
0.006755649089813232, | |
0.006695809841156006, | |
0.006688769817352295, | |
0.006743009090423584, | |
0.006362211227416992, | |
0.006204291820526123, | |
0.006601250171661377, | |
0.006641250133514404, | |
0.006682848930358887, | |
0.006653570175170899, | |
0.006720448970794678, | |
0.006341570854187011, | |
0.006651969909667968, | |
0.006704289913177491, | |
0.006741408824920654, | |
0.006663330078125, | |
0.00665900993347168, | |
0.006710527896881103, | |
0.0066346898078918455 | |
] | |
            },
            "throughput": {
                "unit": "samples/s",
                "value": 163.1333373687826
            },
            "energy": null,
            "efficiency": null
        }
    }
}
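
For reference, a run with the configuration recorded above would normally be launched through optimum-benchmark's Python API rather than by writing this JSON by hand. The sketch below is a minimal, hypothetical reconstruction that mirrors the `config` section of this file; it assumes the `Benchmark`, `BenchmarkConfig`, `PyTorchConfig`, `InferenceConfig`, and `ProcessConfig` classes of optimum-benchmark around version 0.4.0 and is not the exact script that produced this report.

```python
# Hypothetical sketch: launching a similar ViT image-classification benchmark
# with optimum-benchmark (~0.4.0). Class names and keyword arguments are
# assumptions based on the library's public Python API and on the "config"
# section above; adjust them to the installed version.
from optimum_benchmark import (
    Benchmark,
    BenchmarkConfig,
    InferenceConfig,
    ProcessConfig,
    PyTorchConfig,
)

config = BenchmarkConfig(
    name="cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
    backend=PyTorchConfig(
        model="google/vit-base-patch16-224",
        task="image-classification",
        device="cuda",
        device_ids="4",
        no_weights=True,  # benchmark with randomly initialized weights
    ),
    scenario=InferenceConfig(
        memory=True,
        latency=True,
        input_shapes={"batch_size": 1},
    ),
    launcher=ProcessConfig(
        device_isolation=True,
        device_isolation_action="warn",
    ),
)

# Benchmark.launch runs the scenario in a separate process and returns a
# report object, which can then be serialized to JSON or pushed to the Hub.
report = Benchmark.launch(config)
```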
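
The `forward.throughput` value follows directly from the latency measurements: 163 forward passes were recorded over a total of ~0.9992 s, and with a `batch_size` of 1 that gives 163 / 0.9992 ≈ 163.13 samples/s. The snippet below is a small sketch for recomputing these statistics from the file itself; the path `benchmark.json` is a placeholder for wherever the file lives locally.

```python
# Recompute the forward-pass statistics reported above from the raw
# per-call latency values stored in this file.
import json
import statistics

with open("benchmark.json") as f:  # placeholder path
    data = json.load(f)

scenario = data["config"]["scenario"]
forward = data["report"]["forward"]
latencies = forward["latency"]["values"]  # per-call latencies, in seconds

count = len(latencies)                               # 163 forward passes
total = sum(latencies)                               # ~0.999 s of measured time
batch_size = scenario["input_shapes"]["batch_size"]  # 1 sample per call
throughput = count * batch_size / total              # ~163.13 samples/s

print(f"count      = {count}")
print(f"mean       = {statistics.mean(latencies):.6f} s")
print(f"p50        = {statistics.median(latencies):.6f} s")
print(f"throughput = {throughput:.2f} samples/s")
```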