rocm / cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json
IlyasMoutawwakil: Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub (b2f7766)
{
    "config": {
        "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        "backend": {
            "name": "pytorch",
            "version": "2.3.1+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "image-classification",
            "library": "transformers",
            "model_type": "vit",
            "model": "google/vit-base-patch16-224",
            "processor": "google/vit-base-patch16-224",
            "device": "cuda",
            "device_ids": "5",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
"scenario": { | |
"name": "inference", | |
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario", | |
"iterations": 1, | |
"duration": 1, | |
"warmup_runs": 1, | |
"input_shapes": { | |
"batch_size": 1, | |
"num_choices": 2, | |
"sequence_length": 2 | |
}, | |
"new_tokens": null, | |
"memory": true, | |
"latency": true, | |
"energy": false, | |
"forward_kwargs": {}, | |
"generate_kwargs": { | |
"max_new_tokens": 2, | |
"min_new_tokens": 2 | |
}, | |
"call_kwargs": { | |
"num_inference_steps": 2 | |
} | |
}, | |
"launcher": { | |
"name": "process", | |
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher", | |
"device_isolation": true, | |
"device_isolation_action": "warn", | |
"numactl": false, | |
"numactl_kwargs": {}, | |
"start_method": "spawn" | |
}, | |
"environment": { | |
"cpu": " AMD EPYC 7763 64-Core Processor", | |
"cpu_count": 128, | |
"cpu_ram_mb": 1082015.256576, | |
"system": "Linux", | |
"machine": "x86_64", | |
"platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35", | |
"processor": "x86_64", | |
"python_version": "3.10.12", | |
"gpu": [ | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]", | |
"Advanced Micro Devices, Inc. [AMD/ATI]" | |
], | |
"gpu_count": 8, | |
"gpu_vram_mb": 549621596160, | |
"optimum_benchmark_version": "0.4.0", | |
"optimum_benchmark_commit": null, | |
"transformers_version": "4.44.2", | |
"transformers_commit": null, | |
"accelerate_version": "0.34.2", | |
"accelerate_commit": null, | |
"diffusers_version": "0.30.3", | |
"diffusers_commit": null, | |
"optimum_version": null, | |
"optimum_commit": null, | |
"timm_version": "1.0.9", | |
"timm_commit": null, | |
"peft_version": "0.12.0", | |
"peft_commit": null | |
} | |
}, | |
"report": { | |
"load": { | |
"memory": { | |
"unit": "MB", | |
"max_ram": 1073.270784, | |
"max_global_vram": 68702.69952, | |
"max_process_vram": 47924.85888, | |
"max_reserved": 400.556032, | |
"max_allocated": 346.271744 | |
}, | |
"latency": { | |
"unit": "s", | |
"count": 1, | |
"total": 8.21366796875, | |
"mean": 8.21366796875, | |
"stdev": 0.0, | |
"p50": 8.21366796875, | |
"p90": 8.21366796875, | |
"p95": 8.21366796875, | |
"p99": 8.21366796875, | |
"values": [ | |
8.21366796875 | |
] | |
}, | |
"throughput": null, | |
"energy": null, | |
"efficiency": null | |
}, | |
"forward": { | |
"memory": { | |
"unit": "MB", | |
"max_ram": 1243.910144, | |
"max_global_vram": 68702.69952, | |
"max_process_vram": 191484.669952, | |
"max_reserved": 406.847488, | |
"max_allocated": 355.303424 | |
}, | |
"latency": { | |
"unit": "s", | |
"count": 158, | |
"total": 0.9955480823516838, | |
"mean": 0.006300937230073953, | |
"stdev": 0.0007674622721390403, | |
"p50": 0.006153346061706543, | |
"p90": 0.006863855457305909, | |
"p95": 0.006906760144233704, | |
"p99": 0.006969039378166199, | |
"values": [ | |
0.006692945003509521, | |
0.006477584838867187, | |
0.006705265045166015, | |
0.0066502242088317875, | |
0.0068643040657043455, | |
0.006890223979949951, | |
0.00676254415512085, | |
0.006858863830566406, | |
0.006717424869537353, | |
0.006490865230560303, | |
0.006423025131225586, | |
0.0063643050193786625, | |
0.006414225101470947, | |
0.006344785213470459, | |
0.006322865962982178, | |
0.006275825023651123, | |
0.006230704784393311, | |
0.006247345924377441, | |
0.014466687202453614, | |
0.003999670028686523, | |
0.003975511074066162, | |
0.0041137499809265135, | |
0.006275666236877441, | |
0.006199824810028076, | |
0.006231985092163086, | |
0.0062020659446716305, | |
0.006213584899902344, | |
0.006247506141662597, | |
0.006178544998168945, | |
0.0061996660232543944, | |
0.006183986186981201, | |
0.006302064895629883, | |
0.0061689457893371585, | |
0.006120785236358643, | |
0.006093746185302735, | |
0.006124946117401123, | |
0.006139985084533691, | |
0.006142226219177246, | |
0.0061495847702026365, | |
0.006132785797119141, | |
0.006069906234741211, | |
0.006137425899505615, | |
0.006138545036315918, | |
0.006157745838165283, | |
0.006113426208496094, | |
0.006108945846557618, | |
0.006105906009674073, | |
0.0061563048362731935, | |
0.006141585826873779, | |
0.006126706123352051, | |
0.006109105110168457, | |
0.006116625785827637, | |
0.006128945827484131, | |
0.006157586097717285, | |
0.006118865966796875, | |
0.006135184764862061, | |
0.006121585845947266, | |
0.0061393451690673825, | |
0.006141746044158935, | |
0.006406384944915772, | |
0.006179825782775879, | |
0.006354705810546875, | |
0.006383184909820557, | |
0.006331504821777344, | |
0.006278224945068359, | |
0.006163506031036377, | |
0.006165105819702149, | |
0.006144306182861328, | |
0.006174705028533936, | |
0.006202546119689942, | |
0.006205585956573487, | |
0.006147665977478027, | |
0.006149745941162109, | |
0.00627102518081665, | |
0.00613006591796875, | |
0.006166065216064453, | |
0.006393744945526123, | |
0.006193426132202149, | |
0.006195024967193603, | |
                    0.006153106212615967,
                    0.006166705131530762,
                    0.006139505863189698,
                    0.006153585910797119,
                    0.006155025959014892,
                    0.006136946201324463,
                    0.006154705047607422,
                    0.0061353459358215335,
                    0.006140305042266846,
                    0.006150866031646728,
                    0.006348625183105468,
                    0.0061372661590576175,
                    0.006102066040039062,
                    0.006134706020355225,
                    0.006107504844665527,
                    0.006132145881652832,
                    0.0061286258697509765,
                    0.006135825157165527,
                    0.006146545886993408,
                    0.006142065048217773,
                    0.006126545906066895,
                    0.006152945995330811,
                    0.006132945060729981,
                    0.006127026081085205,
                    0.006103024959564209,
                    0.0060755062103271485,
                    0.006145746231079102,
                    0.006132465839385986,
                    0.006128466129302979,
                    0.006116465091705322,
                    0.006148786067962647,
                    0.006100786209106445,
                    0.006124145984649658,
                    0.006119825839996338,
                    0.006140144824981689,
                    0.006126706123352051,
                    0.006128945827484131,
                    0.006110225200653076,
                    0.006107666015625,
                    0.006124306201934815,
                    0.006093585968017578,
                    0.006117586135864258,
                    0.006132784843444824,
                    0.0061212658882141115,
                    0.0060926260948181155,
                    0.00609422492980957,
                    0.0060711860656738285,
                    0.006181106090545654,
                    0.006106544971466064,
                    0.006122546195983887,
                    0.0061539049148559575,
                    0.006124306201934815,
                    0.006124626159667969,
                    0.006127345085144043,
                    0.006108625888824463,
                    0.0060931057929992675,
                    0.006527023792266846,
                    0.006967663764953613,
                    0.006903024196624756,
                    0.0069089441299438475,
                    0.006928783893585205,
                    0.006886544227600097,
                    0.0069139041900634765,
                    0.006970862865447998,
                    0.006403985023498535,
                    0.006519505023956299,
                    0.006860464096069336,
                    0.0068764638900756835,
                    0.006907983779907227,
                    0.00688862419128418,
                    0.006876624107360839,
                    0.006863663196563721,
                    0.0069065442085266115,
                    0.006854063987731933,
                    0.006919983863830566,
                    0.006787344932556152,
                    0.0063196649551391605,
                    0.006333905220031738,
                    0.0062243051528930666
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 158.70654848409959
            },
            "energy": null,
            "efficiency": null
        }
    }
}
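
For readers who want to work with this report programmatically rather than by eye, the sketch below shows one way to load the file and recompute the headline forward-pass numbers (mean latency, p50/p90, and samples/s) from the raw "values" array. It is a minimal sketch, not part of the benchmark artifact itself: the local filename benchmark.json is an assumption, only the Python standard library is used (no optimum-benchmark API), and the percentile convention may differ slightly from the one used to produce the p50/p90 fields above.

import json
import statistics

# Load the report shown above (assumed to have been saved locally as benchmark.json;
# the filename is an assumption, not something the report itself prescribes).
with open("benchmark.json") as f:
    benchmark = json.load(f)

backend = benchmark["config"]["backend"]
forward = benchmark["report"]["forward"]
latencies = forward["latency"]["values"]  # per-call forward latencies, in seconds
batch_size = benchmark["config"]["scenario"]["input_shapes"]["batch_size"]

# Percentiles via the standard library; the interpolation method may not match
# the convention optimum-benchmark used for the report's own p50/p90 fields.
cuts = statistics.quantiles(latencies, n=100, method="inclusive")

print(f"model:         {backend['model']} on {backend['device']} (ids {backend['device_ids']})")
print(f"forward calls: {len(latencies)}")
print(f"mean latency:  {statistics.mean(latencies) * 1e3:.3f} ms")
print(f"p50 latency:   {cuts[49] * 1e3:.3f} ms")
print(f"p90 latency:   {cuts[89] * 1e3:.3f} ms")
# Throughput as reported: processed samples divided by the total measured time.
print(f"throughput:    {len(latencies) * batch_size / sum(latencies):.1f} samples/s")

With the values above this prints a mean of roughly 6.3 ms per forward pass and about 158.7 samples/s, matching the report's "throughput" entry.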