rocm / cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json
IlyasMoutawwakil: Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub (verified commit 56492e1)
{
  "config": {
    "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
    "backend": {
      "name": "pytorch",
      "version": "2.2.2+rocm5.7",
      "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
      "task": "image-classification",
      "library": "transformers",
      "model_type": "vit",
      "model": "google/vit-base-patch16-224",
      "processor": "google/vit-base-patch16-224",
      "device": "cuda",
      "device_ids": "0",
      "seed": 42,
      "inter_op_num_threads": null,
      "intra_op_num_threads": null,
      "model_kwargs": {},
      "processor_kwargs": {},
      "hub_kwargs": {},
      "no_weights": true,
      "device_map": null,
      "torch_dtype": null,
      "eval_mode": true,
      "to_bettertransformer": false,
      "low_cpu_mem_usage": null,
      "attn_implementation": null,
      "cache_implementation": null,
      "autocast_enabled": false,
      "autocast_dtype": null,
      "torch_compile": false,
      "torch_compile_target": "forward",
      "torch_compile_config": {},
      "quantization_scheme": null,
      "quantization_config": {},
      "deepspeed_inference": false,
      "deepspeed_inference_config": {},
      "peft_type": null,
      "peft_config": {}
    },
    "scenario": {
      "name": "inference",
      "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
      "iterations": 1,
      "duration": 1,
      "warmup_runs": 1,
      "input_shapes": {
        "batch_size": 1,
        "num_choices": 2,
        "sequence_length": 2
      },
      "new_tokens": null,
      "latency": true,
      "memory": true,
      "energy": false,
      "forward_kwargs": {},
      "generate_kwargs": {
        "max_new_tokens": 2,
        "min_new_tokens": 2
      },
      "call_kwargs": {
        "num_inference_steps": 2
      }
    },
    "launcher": {
      "name": "process",
      "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
      "device_isolation": true,
      "device_isolation_action": "error",
      "numactl": false,
      "numactl_kwargs": {},
      "start_method": "spawn"
    },
    "environment": {
      "cpu": " AMD EPYC 7763 64-Core Processor",
      "cpu_count": 128,
      "cpu_ram_mb": 1082015.236096,
      "system": "Linux",
      "machine": "x86_64",
      "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
      "processor": "x86_64",
      "python_version": "3.10.12",
      "gpu": [
        "Advanced Micro Devices, Inc. [AMD/ATI]"
      ],
      "gpu_count": 1,
      "gpu_vram_mb": 68702699520,
      "optimum_benchmark_version": "0.3.1",
      "optimum_benchmark_commit": "f837debaab7bc93d6490c10e065312a52e57b795",
      "transformers_version": "4.42.4",
      "transformers_commit": null,
      "accelerate_version": "0.32.1",
      "accelerate_commit": null,
      "diffusers_version": "0.29.2",
      "diffusers_commit": null,
      "optimum_version": null,
      "optimum_commit": null,
      "timm_version": "1.0.7",
      "timm_commit": null,
      "peft_version": null,
      "peft_commit": null
    }
  },
  "report": {
    "forward": {
      "memory": {
        "unit": "MB",
        "max_ram": 1061.261312,
        "max_global_vram": 777.70752,
        "max_process_vram": 127883.730944,
        "max_reserved": 406.847488,
        "max_allocated": 355.30752
      },
      "latency": {
        "unit": "s",
        "count": 155,
        "total": 0.9987684092521671,
        "mean": 0.0064436671564655915,
        "stdev": 6.704029308589116e-05,
        "p50": 0.006430523872375488,
        "p90": 0.006534684181213379,
        "p95": 0.006573035764694214,
        "p99": 0.0066514932346344,
        "values": [
          0.006632283210754395,
          0.006518844127655029,
          0.0063238048553466795,
          0.006455803871154785,
          0.006338685035705566,
          0.006446524143218994,
          0.006352924823760987,
          0.006232285022735596,
          0.0064106850624084475,
          0.006346845149993897,
          0.006380284786224365,
          0.00639740514755249,
          0.006337244987487793,
          0.006397244930267334,
          0.00640348482131958,
          0.00639980411529541,
          0.0064345250129699706,
          0.006674044132232666,
          0.0064626851081848145,
          0.006420605182647705,
          0.0064420437812805174,
          0.006448764801025391,
          0.0064449248313903805,
          0.006427164077758789,
          0.006429884910583496,
          0.006566843032836914,
          0.006591164112091065,
          0.006574042797088623,
          0.0065148439407348635,
          0.006500924110412597,
          0.0065118041038513185,
          0.006472764015197754,
          0.0064598040580749515,
          0.006486363887786865,
          0.006447164058685303,
          0.006444604873657227,
          0.0064438037872314455,
          0.006425724029541015,
          0.0064412450790405276,
          0.006415483951568604,
          0.00651116418838501,
          0.006515803813934326,
          0.006478363990783691,
          0.006450523853302002,
          0.006434205055236816,
          0.006419804096221924,
          0.0064476442337036135,
          0.006406684875488281,
          0.006455964088439942,
          0.006433084011077881,
          0.0064087648391723635,
          0.006428924083709717,
          0.0064345250129699706,
          0.00643036413192749,
          0.0064180450439453125,
          0.006418525218963623,
          0.0064420437812805174,
          0.0064399647712707515,
          0.0064418849945068355,
          0.006523484230041504,
          0.006514684200286865,
          0.006572604179382324,
          0.006575163841247559,
          0.006553403854370117,
          0.006529404163360596,
          0.006576603889465332,
          0.0065382041931152345,
          0.0065410838127136234,
          0.006560924053192138,
          0.00647948408126831,
          0.006469404220581055,
          0.006497243881225586,
          0.006426363945007324,
          0.00645420503616333,
          0.006428764820098877,
          0.0064420437812805174,
          0.006447165012359619,
          0.006403325080871582,
          0.006438203811645508,
          0.006404604911804199,
          0.006439804077148437,
          0.006428444862365722,
          0.006425084114074707,
          0.0065594840049743655,
          0.0066796431541442875,
          0.0066300430297851565,
          0.006550203800201416,
          0.0065148439407348635,
          0.006521884918212891,
          0.006499965190887452,
          0.006481084823608398,
          0.006461725234985351,
          0.006492763996124268,
          0.006509243965148926,
          0.0064788441658020016,
          0.006493884086608887,
          0.006458364009857178,
          0.0064316439628601075,
          0.006417405128479004,
          0.006409083843231201,
          0.006430045127868652,
          0.006415324211120605,
          0.0064462051391601565,
          0.00648476505279541,
          0.006428284168243408,
          0.006459963798522949,
          0.006451964855194092,
          0.006477564811706543,
          0.006430523872375488,
          0.006403325080871582,
          0.006424284934997558,
          0.006432764053344727,
          0.006439805030822754,
          0.006422045230865478,
          0.006399003982543945,
          0.006427645206451416,
          0.0063961238861083985,
          0.006398204803466797,
          0.006387643814086914,
          0.006395164966583252,
          0.006373085021972656,
          0.006382044792175293,
          0.006363325119018555,
          0.006388444900512696,
          0.006387965202331543,
          0.006398365020751953,
          0.006374524116516113,
          0.0064087648391723635,
          0.006398364067077637,
          0.006396444797515869,
          0.00637452507019043,
          0.006397084236145019,
          0.006390685081481933,
          0.006433403968811035,
          0.00637068510055542,
          0.006359004974365234,
          0.0063956441879272465,
          0.006365884780883789,
          0.006406044960021973,
          0.006355484962463379,
          0.0063641247749328615,
          0.006395804882049561,
          0.006383803844451905,
          0.006385405063629151,
          0.006404284000396728,
          0.0063844451904296875,
          0.006405244827270508,
          0.006358365058898926,
          0.006421724796295166,
          0.006461244106292724,
          0.006426362991333008,
          0.006409245014190674,
          0.0063956441879272465,
          0.006382205009460449,
          0.006388605117797852
        ]
      },
      "throughput": {
        "unit": "samples/s",
        "value": 155.1911319622705
      },
      "energy": null,
      "efficiency": null
    }
  }
}
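
As a sanity check, the summary statistics under report.forward.latency and report.forward.throughput can be recomputed from the raw values array. A minimal Python sketch follows; the local file name benchmark.json and the use of numpy are assumptions for illustration and not part of the report itself, and percentile interpolation may differ slightly from optimum-benchmark's own computation.

import json
import numpy as np

# Load the report shown above (local path is an assumption for illustration).
with open("benchmark.json") as f:
    report = json.load(f)

latency = report["report"]["forward"]["latency"]
values = np.array(latency["values"])  # per-forward-pass latencies, in seconds

print("count:", values.size)    # 155
print("total:", values.sum())   # ~0.9988 s
print("mean :", values.mean())  # ~6.44e-3 s
print("p50  :", np.percentile(values, 50))
print("p90  :", np.percentile(values, 90))
print("p95  :", np.percentile(values, 95))
print("p99  :", np.percentile(values, 99))

# Throughput in samples/s: batch_size * number of forward passes / total latency.
batch_size = report["config"]["scenario"]["input_shapes"]["batch_size"]
print("throughput:", batch_size * values.size / values.sum())  # ~155.19 samples/s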