rocm / cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json
IlyasMoutawwakil (HF staff)
Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub
commit a67a9c7 (verified)
{
  "config": {
    "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
    "backend": {
      "name": "pytorch",
      "version": "2.4.0+rocm6.1",
      "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
      "task": "image-classification",
      "library": "transformers",
      "model_type": "vit",
      "model": "google/vit-base-patch16-224",
      "processor": "google/vit-base-patch16-224",
      "device": "cuda",
      "device_ids": "0",
      "seed": 42,
      "inter_op_num_threads": null,
      "intra_op_num_threads": null,
      "model_kwargs": {},
      "processor_kwargs": {},
      "no_weights": true,
      "device_map": null,
      "torch_dtype": null,
      "eval_mode": true,
      "to_bettertransformer": false,
      "low_cpu_mem_usage": null,
      "attn_implementation": null,
      "cache_implementation": null,
      "autocast_enabled": false,
      "autocast_dtype": null,
      "torch_compile": false,
      "torch_compile_target": "forward",
      "torch_compile_config": {},
      "quantization_scheme": null,
      "quantization_config": {},
      "deepspeed_inference": false,
      "deepspeed_inference_config": {},
      "peft_type": null,
      "peft_config": {}
    },
    "scenario": {
      "name": "inference",
      "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
      "iterations": 1,
      "duration": 1,
      "warmup_runs": 1,
      "input_shapes": {
        "batch_size": 1,
        "num_choices": 2,
        "sequence_length": 2
      },
      "new_tokens": null,
      "memory": true,
      "latency": true,
      "energy": false,
      "forward_kwargs": {},
      "generate_kwargs": {
        "max_new_tokens": 2,
        "min_new_tokens": 2
      },
      "call_kwargs": {
        "num_inference_steps": 2
      }
    },
    "launcher": {
      "name": "process",
      "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
      "device_isolation": true,
      "device_isolation_action": "error",
      "numactl": false,
      "numactl_kwargs": {},
      "start_method": "spawn"
    },
    "environment": {
      "cpu": " AMD EPYC 7763 64-Core Processor",
      "cpu_count": 128,
      "cpu_ram_mb": 1082015.256576,
      "system": "Linux",
      "machine": "x86_64",
      "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
      "processor": "x86_64",
      "python_version": "3.10.12",
      "gpu": [
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]",
        "Advanced Micro Devices, Inc. [AMD/ATI]"
      ],
      "gpu_count": 8,
      "gpu_vram_mb": 549621596160,
      "optimum_benchmark_version": "0.4.0",
      "optimum_benchmark_commit": null,
      "transformers_version": "4.44.2",
      "transformers_commit": null,
      "accelerate_version": "0.33.0",
      "accelerate_commit": null,
      "diffusers_version": "0.30.2",
      "diffusers_commit": null,
      "optimum_version": null,
      "optimum_commit": null,
      "timm_version": "1.0.9",
      "timm_commit": null,
      "peft_version": null,
      "peft_commit": null
    }
  },
  "report": {
    "load": {
      "memory": {
        "unit": "MB",
        "max_ram": 1387.27424,
        "max_global_vram": 11.083776,
        "max_process_vram": 0.0,
        "max_reserved": 400.556032,
        "max_allocated": 346.271744
      },
      "latency": {
        "unit": "s",
        "count": 1,
        "total": 7.97714111328125,
        "mean": 7.97714111328125,
        "stdev": 0.0,
        "p50": 7.97714111328125,
        "p90": 7.97714111328125,
        "p95": 7.97714111328125,
        "p99": 7.97714111328125,
        "values": [
          7.97714111328125
        ]
      },
      "throughput": null,
      "energy": null,
      "efficiency": null
    },
    "forward": {
      "memory": {
        "unit": "MB",
        "max_ram": 1557.970944,
        "max_global_vram": 11.329536,
        "max_process_vram": 0.0,
        "max_reserved": 406.847488,
        "max_allocated": 354.740224
      },
      "latency": {
        "unit": "s",
        "count": 166,
        "total": 0.997475886821747,
        "mean": 0.006008890884468355,
        "stdev": 0.0044019936530691,
        "p50": 0.005726623058319092,
        "p90": 0.006320780992507935,
        "p95": 0.00651046097278595,
        "p99": 0.0066195000886917115,
        "values": [
          0.006621580123901367,
          0.006481901168823242,
          0.006469260215759278,
          0.006576620101928711,
          0.006618380069732666,
          0.006568940162658692,
          0.006519980907440185,
          0.006541581153869629,
          0.0065641398429870604,
          0.006531819820404052,
          0.006261260986328125,
          0.005860143184661866,
          0.005855662822723389,
          0.005774221897125244,
          0.06205949783325195,
          0.004644465923309326,
          0.004635025978088379,
          0.004626706123352051,
          0.004636625766754151,
          0.004641265869140625,
          0.0046399860382080075,
          0.004637906074523926,
          0.00462110710144043,
          0.004639826774597168,
          0.004649586200714111,
          0.004632785797119141,
          0.004633585929870606,
          0.00462622594833374,
          0.0046497459411621095,
          0.004635826110839843,
          0.004610226154327392,
          0.004631825923919678,
          0.0046423859596252445,
          0.004663986206054687,
          0.004637105941772461,
          0.004641265869140625,
          0.004631986141204834,
          0.004649906158447265,
          0.0046283059120178225,
          0.004641106128692627,
          0.004629907131195069,
          0.004626865863800049,
          0.004651825904846191,
          0.004633746147155762,
          0.004634225845336914,
          0.004641426086425781,
          0.004643186092376709,
          0.004632145881652832,
          0.0053995041847229005,
          0.006267661094665527,
          0.006273100852966309,
          0.006233581066131592,
          0.006236940860748291,
          0.006262220859527588,
          0.006236620903015137,
          0.00624622106552124,
          0.0062724609375,
          0.006252302169799805,
          0.005717742919921875,
          0.005694222927093506,
          0.005764943122863769,
          0.005664622783660889,
          0.005689582824707031,
          0.005746063232421875,
          0.00564590311050415,
          0.005718702793121338,
          0.005634542942047119,
          0.005725903034210205,
          0.005658863067626953,
          0.005712782859802246,
          0.0057262229919433595,
          0.005687501907348633,
          0.005684622764587402,
          0.005691503047943115,
          0.00570222282409668,
          0.005752461910247803,
          0.005682703018188477,
          0.00595454216003418,
          0.005746542930603027,
          0.005749742984771728,
          0.005673583030700684,
          0.005681103229522705,
          0.005735823154449463,
          0.005658543109893799,
          0.0057139029502868656,
          0.005708783149719238,
          0.00574910306930542,
          0.00569838285446167,
          0.005950061798095703,
          0.005685262203216553,
          0.005748942852020263,
          0.005727023124694824,
          0.005793422222137451,
          0.005684622764587402,
          0.005695823192596435,
          0.005740622997283936,
          0.005771183013916016,
          0.005659822940826416,
          0.005700142860412598,
          0.0057257418632507325,
          0.005654862880706787,
          0.005697742938995361,
          0.005716302871704102,
          0.005783661842346192,
          0.005665742874145508,
          0.0056839828491210935,
          0.005716143131256103,
          0.005681902885437012,
          0.005669103145599365,
          0.005699822902679444,
          0.005839182853698731,
          0.0057734217643737795,
          0.005679983139038086,
          0.00570270299911499,
          0.005731982231140137,
          0.00575294303894043,
          0.005691022872924805,
          0.005728463172912598,
          0.005717742919921875,
          0.005743982791900634,
          0.0056785430908203125,
          0.00571054220199585,
          0.0058662228584289555,
          0.005777582168579102,
          0.00576110315322876,
          0.005648783206939697,
          0.005689262866973877,
          0.005727342128753662,
          0.0056711831092834475,
          0.005877101898193359,
          0.005857263088226318,
          0.005897741794586181,
          0.005731503009796143,
          0.005772303104400635,
          0.0056839828491210935,
          0.00573326301574707,
          0.0057745428085327144,
          0.005766703128814697,
          0.005691503047943115,
          0.005708942890167237,
          0.00575998306274414,
          0.005780782222747803,
          0.005790223121643066,
          0.005984141826629639,
          0.00624622106552124,
          0.006243980884552002,
          0.00622222089767456,
          0.006259020805358887,
          0.006261421203613281,
          0.006255021095275879,
          0.0062635021209716795,
          0.00630030107498169,
          0.0063199810981750485,
          0.006330700874328613,
          0.006354540824890137,
          0.0063321409225463865,
          0.006330700874328613,
          0.006315340995788574,
          0.006407021045684814,
          0.006306540966033935,
          0.0063028612136840825,
          0.006321580886840821,
          0.0062724609375,
          0.0063010997772216795,
          0.006297420978546142,
          0.0062990207672119145
        ]
      },
      "throughput": {
        "unit": "samples/s",
        "value": 166.42006307433164
      },
      "energy": null,
      "efficiency": null
    }
  }
}