{
    "config": {
        "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
        "backend": {
            "name": "pytorch",
            "version": "2.3.1+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-classification",
            "library": "transformers",
            "model_type": "roberta",
            "model": "FacebookAI/roberta-base",
            "processor": "FacebookAI/roberta-base",
            "device": "cuda",
            "device_ids": "6",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "memory": true,
            "latency": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "warn",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.256576,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]",
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 8,
            "gpu_vram_mb": 549621596160,
            "optimum_benchmark_version": "0.4.0",
            "optimum_benchmark_commit": null,
            "transformers_version": "4.44.2",
            "transformers_commit": null,
            "accelerate_version": "0.34.2",
            "accelerate_commit": null,
            "diffusers_version": "0.30.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.9",
            "timm_commit": null,
            "peft_version": "0.12.0",
            "peft_commit": null
        }
    },
    "report": {
        "load": {
            "memory": {
                "unit": "MB",
                "max_ram": 1041.77664,
                "max_global_vram": 68702.69952,
                "max_process_vram": 44700.635136,
                "max_reserved": 555.74528,
                "max_allocated": 499.374592
            },
            "latency": {
                "unit": "s",
                "count": 1,
                "total": 7.73891943359375,
                "mean": 7.73891943359375,
                "stdev": 0.0,
                "p50": 7.73891943359375,
                "p90": 7.73891943359375,
                "p95": 7.73891943359375,
                "p99": 7.73891943359375,
                "values": [
                    7.73891943359375
                ]
            },
            "throughput": null,
            "energy": null,
            "efficiency": null,
            "measures": null
        },
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1158.619136,
                "max_global_vram": 68702.69952,
                "max_process_vram": 221766.475776,
                "max_reserved": 555.74528,
                "max_allocated": 499.4432
            },
            "latency": {
                "unit": "s",
                "count": 142,
                "total": 0.9992563970088958,
                "mean": 0.0070370168803443375,
                "stdev": 0.001568298630084595,
                "p50": 0.00689785099029541,
                "p90": 0.007436171197891236,
                "p95": 0.007753188037872315,
                "p99": 0.015583963003158579,
                "values": [
                    0.00739729118347168,
                    0.007313930988311767,
                    0.007072171211242676,
                    0.00684896993637085,
                    0.0068736100196838375,
                    0.006872651100158692,
                    0.006856329917907715,
                    0.00713585090637207,
                    0.006889450073242188,
                    0.006858891010284424,
                    0.006925771236419678,
                    0.006930729866027832,
                    0.006887691020965576,
                    0.006877769947052002,
                    0.006912810802459717,
                    0.006866409778594971,
                    0.006908171176910401,
                    0.0069089698791503906,
                    0.006925930976867676,
                    0.00684512996673584,
                    0.006878410816192627,
                    0.006868169784545898,
                    0.006870729923248291,
                    0.006890571117401123,
                    0.006923689842224121,
                    0.006893610954284668,
                    0.006888490200042724,
                    0.006844810962677002,
                    0.006895209789276123,
                    0.016628185272216798,
                    0.002238563060760498,
                    0.005354248046875,
                    0.0068953709602355955,
                    0.006863049983978272,
                    0.006874890804290772,
                    0.006875529766082764,
                    0.006892011165618896,
                    0.006857450008392334,
                    0.00688529109954834,
                    0.0068996901512146,
                    0.006896010875701904,
                    0.0068638501167297365,
                    0.006874889850616455,
                    0.006860010147094727,
                    0.006862889766693115,
                    0.006873449802398682,
                    0.00686913013458252,
                    0.006879691123962402,
                    0.006844170093536377,
                    0.006864170074462891,
                    0.00689040994644165,
                    0.0068672108650207515,
                    0.00690241003036499,
                    0.006912971019744873,
                    0.0068924899101257325,
                    0.0068788909912109375,
                    0.007064970970153808,
                    0.006866569995880127,
                    0.006864330768585205,
                    0.006916009902954102,
                    0.006912810802459717,
                    0.00691441011428833,
                    0.006922571182250976,
                    0.0069278497695922855,
                    0.006923690795898437,
                    0.006901131153106689,
                    0.007142250061035156,
                    0.0069225702285766605,
                    0.006985130786895752,
                    0.006941450119018555,
                    0.0069380908012390136,
                    0.006918890953063965,
                    0.006907050132751465,
                    0.0069020910263061525,
                    0.006901289939880371,
                    0.00688481092453003,
                    0.006896329879760742,
                    0.01408130168914795,
                    0.0023364830017089844,
                    0.004989128112792968,
                    0.006897931098937988,
                    0.006874569892883301,
                    0.006888650894165039,
                    0.006942249774932861,
                    0.006884010791778564,
                    0.006890890121459961,
                    0.006899691104888916,
                    0.0068779301643371584,
                    0.0068984107971191404,
                    0.006916170120239258,
                    0.00692241096496582,
                    0.006916971206665039,
                    0.006904489994049072,
                    0.0069051308631896974,
                    0.006903689861297607,
                    0.0068769707679748535,
                    0.006888169765472412,
                    0.006884331226348877,
                    0.006855050086975098,
                    0.006857131004333496,
                    0.006881609916687011,
                    0.0068739309310913085,
                    0.00685984992980957,
                    0.006912331104278565,
                    0.006901290893554688,
                    0.0068900899887084964,
                    0.006908330917358398,
                    0.006867370128631592,
                    0.006904010772705078,
                    0.006883530139923095,
                    0.006877450942993164,
                    0.007449770927429199,
                    0.007781772136688232,
                    0.0077374520301818845,
                    0.007742891788482666,
                    0.007740171909332275,
                    0.007861291885375977,
                    0.0072080111503601075,
                    0.007440491199493408,
                    0.007741291999816894,
                    0.0077680120468139645,
                    0.007763212203979492,
                    0.007745131969451904,
                    0.007753612041473389,
                    0.01786050796508789,
                    0.0022990429401397706,
                    0.004296647071838379,
                    0.007362091064453125,
                    0.006910571098327637,
                    0.006957129955291748,
                    0.006960171222686768,
                    0.0068712100982666014,
                    0.006914570808410645,
                    0.006885610103607178,
                    0.0068924899101257325,
                    0.0068977708816528325,
                    0.00691664981842041,
                    0.006901930809020996,
                    0.006906410217285156,
                    0.00692401123046875,
                    0.006909289836883545,
                    0.006892169952392578
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 142.10567020141463
            },
            "energy": null,
            "efficiency": null,
            "measures": null
        }
    }
}