rocm/cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json
IlyasMoutawwakil
HF staff
Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub
546a89a
verified
{
  "config": {
    "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
    "backend": {
      "name": "pytorch",
      "version": "2.2.2+rocm5.7",
      "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
      "task": "image-classification",
      "library": "transformers",
      "model": "google/vit-base-patch16-224",
      "processor": "google/vit-base-patch16-224",
      "device": "cuda",
      "device_ids": "0",
      "seed": 42,
      "inter_op_num_threads": null,
      "intra_op_num_threads": null,
      "model_kwargs": {},
      "processor_kwargs": {},
      "hub_kwargs": {},
      "no_weights": true,
      "device_map": null,
      "torch_dtype": null,
      "eval_mode": true,
      "to_bettertransformer": false,
      "low_cpu_mem_usage": null,
      "attn_implementation": null,
      "cache_implementation": null,
      "autocast_enabled": false,
      "autocast_dtype": null,
      "torch_compile": false,
      "torch_compile_target": "forward",
      "torch_compile_config": {},
      "quantization_scheme": null,
      "quantization_config": {},
      "deepspeed_inference": false,
      "deepspeed_inference_config": {},
      "peft_type": null,
      "peft_config": {}
    },
    "scenario": {
      "name": "inference",
      "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
      "iterations": 1,
      "duration": 1,
      "warmup_runs": 1,
      "input_shapes": {
        "batch_size": 1,
        "num_choices": 2,
        "sequence_length": 2
      },
      "new_tokens": null,
      "latency": true,
      "memory": true,
      "energy": false,
      "forward_kwargs": {},
      "generate_kwargs": {
        "max_new_tokens": 2,
        "min_new_tokens": 2
      },
      "call_kwargs": {
        "num_inference_steps": 2
      }
    },
    "launcher": {
      "name": "process",
      "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
      "device_isolation": true,
      "device_isolation_action": "error",
      "start_method": "spawn"
    },
    "environment": {
      "cpu": " AMD EPYC 7763 64-Core Processor",
      "cpu_count": 128,
      "cpu_ram_mb": 1082015.236096,
      "system": "Linux",
      "machine": "x86_64",
      "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
      "processor": "x86_64",
      "python_version": "3.10.12",
      "gpu": [
        "Advanced Micro Devices, Inc. [AMD/ATI]"
      ],
      "gpu_count": 1,
      "gpu_vram_mb": 68702699520,
      "optimum_benchmark_version": "0.2.1",
      "optimum_benchmark_commit": "56d026bf244c6516d8cb780280ce7cc6505f270e",
      "transformers_version": "4.40.2",
      "transformers_commit": null,
      "accelerate_version": "0.30.1",
      "accelerate_commit": null,
      "diffusers_version": "0.27.2",
      "diffusers_commit": null,
      "optimum_version": null,
      "optimum_commit": null,
      "timm_version": "1.0.3",
      "timm_commit": null,
      "peft_version": null,
      "peft_commit": null
    }
  },
  "report": {
    "forward": {
      "memory": {
        "unit": "MB",
        "max_ram": 1061.511168,
        "max_global_vram": 1758.8224,
        "max_process_vram": 169640.2432,
        "max_reserved": 406.847488,
        "max_allocated": 355.912704
      },
      "latency": {
        "unit": "s",
        "count": 159,
        "total": 0.99409481048584,
        "mean": 0.006252168619407797,
        "stdev": 0.000202846752522202,
        "p50": 0.0061825718879699705,
        "p90": 0.006486155891418457,
        "p95": 0.006586828136444092,
        "p99": 0.006915088205337524,
        "values": [
          0.006333450794219971,
          0.006251532077789307,
          0.006217772006988525,
          0.006218891143798828,
          0.006264331817626953,
          0.006353131771087647,
          0.006386571884155273,
          0.006309452056884765,
          0.006364811897277832,
          0.006316971778869629,
          0.006314570903778076,
          0.006308970928192138,
          0.006528812885284424,
          0.0062910509109497074,
          0.006288812160491943,
          0.006249291896820068,
          0.0062409710884094236,
          0.0062249708175659176,
          0.006294251918792725,
          0.006272811889648438,
          0.0061963310241699215,
          0.006208491802215576,
          0.006260171890258789,
          0.0062745709419250485,
          0.006233130931854248,
          0.006315052032470703,
          0.006253131866455078,
          0.006235850811004639,
          0.006254732131958008,
          0.0062724919319152835,
          0.006351051807403564,
          0.006288651943206787,
          0.006241771221160888,
          0.006305771827697754,
          0.006229611873626709,
          0.006225931167602539,
          0.007886575222015382,
          0.006320971965789795,
          0.006151691913604736,
          0.006144491195678711,
          0.006119532108306885,
          0.006178571224212647,
          0.0061942520141601565,
          0.006135691165924073,
          0.006130091190338135,
          0.006131210803985596,
          0.00618801212310791,
          0.006180012226104736,
          0.006123691082000732,
          0.006172331809997558,
          0.006181770801544189,
          0.006133131980895996,
          0.0062171311378479005,
          0.00616353178024292,
          0.00613313102722168,
          0.00617985200881958,
          0.006133450984954834,
          0.006106572151184082,
          0.00611297082901001,
          0.00611489200592041,
          0.006140330791473389,
          0.006091372013092041,
          0.006150731086730957,
          0.006152331829071045,
          0.006152811050415039,
          0.006153771877288819,
          0.0061675310134887695,
          0.0061894521713256835,
          0.006181931018829346,
          0.0061865711212158205,
          0.006166252136230468,
          0.00616641092300415,
          0.006174091815948486,
          0.0061742510795593265,
          0.006125611782073975,
          0.006109450817108154,
          0.006179212093353271,
          0.006142090797424316,
          0.006143052101135254,
          0.006203530788421631,
          0.006181612014770508,
          0.006382091999053955,
          0.006177772045135498,
          0.006208331108093262,
          0.006155211925506592,
          0.006139211177825928,
          0.006141612052917481,
          0.006151531219482422,
          0.006221931934356689,
          0.006161770820617676,
          0.0061427321434021,
          0.006183210849761963,
          0.006146571159362793,
          0.0061825718879699705,
          0.006169610977172851,
          0.006190092086791992,
          0.006137771129608154,
          0.006155531883239746,
          0.006128011226654052,
          0.006216012001037598,
          0.00621825122833252,
          0.006154891014099121,
          0.0061660919189453125,
          0.006145131111145019,
          0.006142412185668945,
          0.006167370796203614,
          0.0061752119064331056,
          0.006203052043914795,
          0.006107050895690918,
          0.006128011226654052,
          0.006211851119995117,
          0.006184971809387207,
          0.006135371208190918,
          0.0061612920761108395,
          0.006162570953369141,
          0.00613841199874878,
          0.006104170799255371,
          0.00612529182434082,
          0.0061412911415100096,
          0.006192011833190918,
          0.006185931205749512,
          0.006101451873779297,
          0.00620465087890625,
          0.006666091918945313,
          0.006583052158355713,
          0.00633649206161499,
          0.006499852180480957,
          0.006529771804809571,
          0.006942092895507812,
          0.006870252132415771,
          0.006895533084869385,
          0.006680172920227051,
          0.006411052227020263,
          0.006569292068481445,
          0.006512012004852295,
          0.0065121731758117675,
          0.006620811939239502,
          0.006458251953125,
          0.006456811904907226,
          0.006650893211364746,
          0.006482731819152832,
          0.006579212188720703,
          0.0063411321640014645,
          0.006137131214141845,
          0.00613505220413208,
          0.0061387310028076176,
          0.006154891967773438,
          0.006149930953979492,
          0.006171850204467774,
          0.0061392121315002445,
          0.006183850765228272,
          0.006174091815948486,
          0.006164650917053222,
          0.006131691932678223,
          0.006121931076049805,
          0.006192811965942383,
          0.006171051025390625,
          0.006116652011871338,
          0.006121131896972656
        ]
      },
      "throughput": {
        "unit": "samples/s",
        "value": 159.94450259959874
      },
      "energy": null,
      "efficiency": null
    }
  }
}