{
    "config": {
        "name": "cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "multiple-choice",
            "library": "transformers",
            "model": "FacebookAI/roberta-base",
            "processor": "FacebookAI/roberta-base",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
"scenario": { | |
"name": "inference", | |
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario", | |
"iterations": 1, | |
"duration": 1, | |
"warmup_runs": 1, | |
"input_shapes": { | |
"batch_size": 1, | |
"num_choices": 2, | |
"sequence_length": 2 | |
}, | |
"new_tokens": null, | |
"latency": true, | |
"memory": true, | |
"energy": false, | |
"forward_kwargs": {}, | |
"generate_kwargs": { | |
"max_new_tokens": 2, | |
"min_new_tokens": 2 | |
}, | |
"call_kwargs": { | |
"num_inference_steps": 2 | |
} | |
}, | |
"launcher": { | |
"name": "process", | |
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher", | |
"device_isolation": true, | |
"device_isolation_action": "error", | |
"numactl": false, | |
"numactl_kwargs": {}, | |
"start_method": "spawn" | |
}, | |
"environment": { | |
"cpu": " AMD EPYC 7763 64-Core Processor", | |
"cpu_count": 128, | |
"cpu_ram_mb": 1082015.236096, | |
"system": "Linux", | |
"machine": "x86_64", | |
"platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35", | |
"processor": "x86_64", | |
"python_version": "3.10.12", | |
"gpu": [ | |
"Advanced Micro Devices, Inc. [AMD/ATI]" | |
], | |
"gpu_count": 1, | |
"gpu_vram_mb": 68702699520, | |
"optimum_benchmark_version": "0.2.1", | |
"optimum_benchmark_commit": "d920fe9626db1e7915f6d3574b5b54b0159cd100", | |
"transformers_version": "4.42.3", | |
"transformers_commit": null, | |
"accelerate_version": "0.31.0", | |
"accelerate_commit": null, | |
"diffusers_version": "0.29.2", | |
"diffusers_commit": null, | |
"optimum_version": null, | |
"optimum_commit": null, | |
"timm_version": "1.0.7", | |
"timm_commit": null, | |
"peft_version": null, | |
"peft_commit": null | |
} | |
}, | |
"report": { | |
"forward": { | |
"memory": { | |
"unit": "MB", | |
"max_ram": 1006.239744, | |
"max_global_vram": 898.473984, | |
"max_process_vram": 203727.42144, | |
"max_reserved": 555.74528, | |
"max_allocated": 499.507712 | |
}, | |
"latency": { | |
"unit": "s", | |
"count": 123, | |
"total": 0.9957338790893554, | |
"mean": 0.00809539739097037, | |
"stdev": 0.0008398965339142119, | |
"p50": 0.007723027229309082, | |
"p90": 0.009516560173034667, | |
"p95": 0.00974680004119873, | |
"p99": 0.01075251974105835, | |
"values": [ | |
                    0.010953581809997559,
                    0.011392781257629395,
                    0.010039663314819336,
                    0.009949584007263184,
                    0.009778223037719726,
                    0.009741904258728028,
                    0.0095556640625,
                    0.009514384269714356,
                    0.009565103530883788,
                    0.009472304344177247,
                    0.009428625106811523,
                    0.009461104393005371,
                    0.009691344261169434,
                    0.009484304428100585,
                    0.009401583671569824,
                    0.009352944374084472,
                    0.009400304794311523,
                    0.00950830364227295,
                    0.00971134376525879,
                    0.009747344017028809,
                    0.009517104148864745,
                    0.009990384101867675,
                    0.007858066082000733,
                    0.0077172679901123045,
                    0.007526547908782959,
                    0.007553267002105713,
                    0.007561587810516358,
                    0.007547027111053467,
                    0.007543347835540772,
                    0.007911346912384033,
                    0.007921106815338134,
                    0.007755987167358399,
                    0.007699507236480713,
                    0.007690227031707764,
                    0.007547348022460937,
                    0.007543986797332764,
                    0.007514867782592773,
                    0.007514387130737305,
                    0.007536468029022217,
                    0.0075204668045043945,
                    0.007554387092590332,
                    0.007511988162994385,
                    0.007562226772308349,
                    0.007571347236633301,
                    0.007555508136749268,
                    0.0075271868705749515,
                    0.0075257477760314944,
                    0.007521906852722168,
                    0.007617907047271728,
                    0.007671507835388183,
                    0.0075081467628479006,
                    0.00753790807723999,
                    0.00753790807723999,
                    0.007562547206878662,
                    0.007548307895660401,
                    0.007539187908172608,
                    0.007524147987365722,
                    0.007560147762298584,
                    0.0075529470443725585,
                    0.007552467823028565,
                    0.007528947830200195,
                    0.007514707088470459,
                    0.007519507884979248,
                    0.00751934814453125,
                    0.007521427154541015,
                    0.007614227771759033,
                    0.007539507865905762,
                    0.007528467178344727,
                    0.007521748065948486,
                    0.007491347789764405,
                    0.007736146926879883,
                    0.007478868007659912,
                    0.0076998271942138674,
                    0.007476307868957519,
                    0.00750574779510498,
                    0.0075015869140625,
                    0.007731667995452881,
                    0.00754686689376831,
                    0.007498867988586426,
                    0.007507507801055908,
                    0.007672787189483643,
                    0.007552787780761718,
                    0.007542226791381836,
                    0.007559988021850586,
                    0.007535828113555908,
                    0.0077260670661926266,
                    0.00750302791595459,
                    0.007481586933135986,
                    0.008385107040405274,
                    0.008572465896606445,
                    0.008507986068725586,
                    0.008539985656738282,
                    0.008247185707092285,
                    0.008296626091003418,
                    0.008182546615600587,
                    0.008060306549072265,
                    0.007893746852874755,
                    0.007899187088012695,
                    0.007915027141571045,
                    0.007913267135620117,
                    0.0076929469108581545,
                    0.007891026973724366,
                    0.007879027843475342,
                    0.008110547065734863,
                    0.008294547080993652,
                    0.008106706619262695,
                    0.007725748062133789,
                    0.0077852668762207035,
                    0.007815667152404785,
                    0.007723027229309082,
                    0.00789950704574585,
                    0.007850227832794189,
                    0.007901906967163086,
                    0.00787166690826416,
                    0.00777390718460083,
                    0.007774227142333985,
                    0.007908307075500488,
                    0.008232466697692871,
                    0.007683187007904053,
                    0.008133426666259765,
                    0.008115986824035644,
                    0.008093106269836425,
                    0.008336947441101075
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 123.52698103683001
            },
            "energy": null,
            "efficiency": null
        }
    }
}