rocm/cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json
IlyasMoutawwakil
HF staff
Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub
b36ea37
verified
{
  "config": {
    "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
    "backend": {
      "name": "pytorch",
      "version": "2.3.1+rocm5.7",
      "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
      "task": "image-classification",
      "library": "transformers",
      "model_type": "vit",
      "model": "google/vit-base-patch16-224",
      "processor": "google/vit-base-patch16-224",
      "device": "cuda",
      "device_ids": "6",
      "seed": 42,
      "inter_op_num_threads": null,
      "intra_op_num_threads": null,
      "model_kwargs": {},
      "processor_kwargs": {},
      "no_weights": true,
      "device_map": null,
      "torch_dtype": null,
      "eval_mode": true,
      "to_bettertransformer": false,
      "low_cpu_mem_usage": null,
      "attn_implementation": null,
      "cache_implementation": null,
      "autocast_enabled": false,
      "autocast_dtype": null,
      "torch_compile": false,
      "torch_compile_target": "forward",
      "torch_compile_config": {},
      "quantization_scheme": null,
      "quantization_config": {},
      "deepspeed_inference": false,
      "deepspeed_inference_config": {},
      "peft_type": null,
      "peft_config": {}
    },
    "scenario": {
      "name": "inference",
      "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
      "iterations": 1,
      "duration": 1,
      "warmup_runs": 1,
      "input_shapes": {
        "batch_size": 1,
        "num_choices": 2,
        "sequence_length": 2
      },
      "new_tokens": null,
      "memory": true,
      "latency": true,
      "energy": false,
      "forward_kwargs": {},
      "generate_kwargs": {
        "max_new_tokens": 2,
        "min_new_tokens": 2
      },
      "call_kwargs": {
        "num_inference_steps": 2
      }
    },
    "launcher": {
      "name": "process",
      "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
      "device_isolation": true,
      "device_isolation_action": "warn",
      "numactl": false,
      "numactl_kwargs": {},
      "start_method": "spawn"
    },
    "environment": {
      "cpu": " AMD EPYC 7763 64-Core Processor",
      "cpu_count": 128,
      "cpu_ram_mb": 1082015.256576,
      "system": "Linux",
      "machine": "x86_64",
      "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
      "processor": "x86_64",
      "python_version": "3.10.12",
      "gpu": [
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]"
      ],
      "gpu_count": 8,
      "gpu_vram_mb": 549621596160,
      "optimum_benchmark_version": "0.4.0",
      "optimum_benchmark_commit": null,
      "transformers_version": "4.44.2",
      "transformers_commit": null,
      "accelerate_version": "0.34.2",
      "accelerate_commit": null,
      "diffusers_version": "0.30.3",
      "diffusers_commit": null,
      "optimum_version": null,
      "optimum_commit": null,
      "timm_version": "1.0.9",
      "timm_commit": null,
      "peft_version": "0.12.0",
      "peft_commit": null
    }
  },
  "report": {
    "load": {
      "memory": {
        "unit": "MB",
        "max_ram": 1036.73856,
        "max_global_vram": 68702.69952,
        "max_process_vram": 39640.735744,
        "max_reserved": 400.556032,
        "max_allocated": 346.271744
      },
      "latency": {
        "unit": "s",
        "count": 1,
        "total": 8.3035693359375,
        "mean": 8.3035693359375,
        "stdev": 0.0,
        "p50": 8.3035693359375,
        "p90": 8.3035693359375,
        "p95": 8.3035693359375,
        "p99": 8.3035693359375,
        "values": [
          8.3035693359375
        ]
      },
      "throughput": null,
      "energy": null,
      "efficiency": null
    },
    "forward": {
      "memory": {
        "unit": "MB",
        "max_ram": 1208.99584,
        "max_global_vram": 68702.69952,
        "max_process_vram": 173695.299584,
        "max_reserved": 406.847488,
        "max_allocated": 355.303424
      },
      "latency": {
        "unit": "s",
        "count": 158,
        "total": 0.9943354582786562,
        "mean": 0.006293262394168709,
        "stdev": 0.001967292830462185,
        "p50": 0.006099374055862427,
        "p90": 0.006784158992767334,
        "p95": 0.006825614929199219,
        "p99": 0.006897773418426514,
        "values": [
          0.006699374198913574,
          0.006381133079528809,
          0.006367534160614014,
          0.006467373847961426,
          0.006468974113464356,
          0.006448173999786377,
          0.0063705739974975585,
          0.006373774051666259,
          0.006330573081970215,
          0.006572014808654785,
          0.006250574111938477,
          0.006305774211883545,
          0.006213932991027832,
          0.0061947340965271,
          0.00616497278213501,
          0.006436014175415039,
          0.006229292869567871,
          0.0061449742317199706,
          0.00615217399597168,
          0.006135212898254395,
          0.006143693923950195,
          0.006132493019104004,
          0.006121294021606445,
          0.00634161376953125,
          0.006194572925567627,
          0.006152493953704834,
          0.006130893230438233,
          0.00614449405670166,
          0.029930944442749022,
          0.004021928787231445,
          0.004000648975372314,
          0.003998728990554809,
          0.004015368938446045,
          0.0039963281154632565,
          0.004000648975372314,
          0.004003048896789551,
          0.004008328914642334,
          0.005600972175598145,
          0.006062572956085205,
          0.006092333793640136,
          0.005981933116912841,
          0.006107694149017334,
          0.0060649728775024414,
          0.006092332839965821,
          0.006066573143005371,
          0.006066573143005371,
          0.006082573890686035,
          0.006082092761993408,
          0.006044172763824463,
          0.006092174053192139,
          0.006057453155517578,
          0.006061613082885742,
          0.006068813800811768,
          0.006098573207855225,
          0.006098574161529541,
          0.006048653125762939,
          0.006115053176879883,
          0.006062094211578369,
          0.006092973232269287,
          0.006050253868103027,
          0.006098412990570069,
          0.00606993293762207,
          0.0061172938346862795,
          0.00605409288406372,
          0.006089454174041748,
          0.0060600128173828125,
          0.006075532913208008,
          0.006060013771057129,
          0.006056492805480957,
          0.006092813014984131,
          0.006097134113311768,
          0.006084813117980957,
          0.006092654228210449,
          0.006096333026885986,
          0.006070892810821533,
          0.00610817289352417,
          0.006074573040008545,
          0.006322894096374512,
          0.006100173950195313,
          0.006063053131103516,
          0.006090413093566895,
          0.00607537317276001,
          0.0060673727989196775,
          0.006222894191741943,
          0.006084493160247803,
          0.00607137393951416,
          0.006055053234100342,
          0.006088492870330811,
          0.006119053840637207,
          0.006068172931671142,
          0.0060856142044067385,
          0.006070093154907226,
          0.00606337308883667,
          0.006048014163970947,
          0.006107373237609863,
          0.0060678539276123045,
          0.006050733089447022,
          0.006056972980499268,
          0.006068813800811768,
          0.0060552129745483395,
          0.00608577299118042,
          0.006065773963928223,
          0.006053293228149414,
          0.006049612998962403,
          0.006072813034057617,
          0.006078252792358399,
          0.006059854030609131,
          0.006058093070983887,
          0.006095054149627685,
          0.006065292835235596,
          0.0060454530715942385,
          0.006078413963317871,
          0.006041293144226074,
          0.006055532932281494,
          0.0061081738471984864,
          0.00609777307510376,
          0.006086574077606201,
          0.006062252998352051,
          0.00633393383026123,
          0.00635793399810791,
          0.006325613975524902,
          0.006301613807678222,
          0.006411694049835205,
          0.006299853801727295,
          0.006411374092102051,
          0.00633169412612915,
          0.006292654037475586,
          0.006404014110565186,
          0.006283373832702637,
          0.006400653839111328,
          0.006330093860626221,
          0.0063564939498901366,
          0.0064648141860961916,
          0.006863854885101319,
          0.006817615032196045,
          0.006810414791107178,
          0.006942735195159912,
          0.00629633378982544,
          0.0064628939628601076,
          0.006661293983459472,
          0.0068260951042175295,
          0.006795855045318604,
          0.006825614929199219,
          0.006827694892883301,
          0.006738255023956299,
          0.006780334949493408,
          0.006825614929199219,
          0.006787055015563965,
          0.006793455123901367,
          0.00660881519317627,
          0.006774413108825684,
          0.006798893928527832,
          0.006778893947601318,
          0.006778735160827637,
          0.006783535003662109,
          0.006835694789886475,
          0.0068334550857543945,
          0.006785614967346192
        ]
      },
      "throughput": {
        "unit": "samples/s",
        "value": 158.90009622458976
      },
      "energy": null,
      "efficiency": null
    }
  }
}