{ "config": { "name": "cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base", "backend": { "name": "pytorch", "version": "2.2.2+rocm5.7", "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend", "task": "multiple-choice", "library": "transformers", "model_type": "roberta", "model": "FacebookAI/roberta-base", "processor": "FacebookAI/roberta-base", "device": "cuda", "device_ids": "0", "seed": 42, "inter_op_num_threads": null, "intra_op_num_threads": null, "model_kwargs": {}, "processor_kwargs": {}, "hub_kwargs": {}, "no_weights": true, "device_map": null, "torch_dtype": null, "eval_mode": true, "to_bettertransformer": false, "low_cpu_mem_usage": null, "attn_implementation": null, "cache_implementation": null, "autocast_enabled": false, "autocast_dtype": null, "torch_compile": false, "torch_compile_target": "forward", "torch_compile_config": {}, "quantization_scheme": null, "quantization_config": {}, "deepspeed_inference": false, "deepspeed_inference_config": {}, "peft_type": null, "peft_config": {} }, "scenario": { "name": "inference", "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario", "iterations": 1, "duration": 1, "warmup_runs": 1, "input_shapes": { "batch_size": 1, "num_choices": 2, "sequence_length": 2 }, "new_tokens": null, "memory": true, "latency": true, "energy": false, "forward_kwargs": {}, "generate_kwargs": { "max_new_tokens": 2, "min_new_tokens": 2 }, "call_kwargs": { "num_inference_steps": 2 } }, "launcher": { "name": "process", "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher", "device_isolation": true, "device_isolation_action": "error", "numactl": false, "numactl_kwargs": {}, "start_method": "spawn" }, "environment": { "cpu": " AMD EPYC 7763 64-Core Processor", "cpu_count": 128, "cpu_ram_mb": 1082015.236096, "system": "Linux", "machine": "x86_64", "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35", "processor": "x86_64", "python_version": "3.10.12", "gpu": [ "Advanced Micro Devices, Inc. 
[AMD/ATI]" ], "gpu_count": 1, "gpu_vram_mb": 68702699520, "optimum_benchmark_version": "0.3.1", "optimum_benchmark_commit": "bc977b19adf337e8eb02985f3cceb35e1a783a1c", "transformers_version": "4.42.4", "transformers_commit": null, "accelerate_version": "0.32.1", "accelerate_commit": null, "diffusers_version": "0.29.2", "diffusers_commit": null, "optimum_version": null, "optimum_commit": null, "timm_version": "1.0.7", "timm_commit": null, "peft_version": null, "peft_commit": null } }, "report": { "load": { "memory": { "unit": "MB", "max_ram": 900.308992, "max_global_vram": 841.777152, "max_process_vram": 46187.175936, "max_reserved": 555.74528, "max_allocated": 499.372544 }, "latency": { "unit": "s", "count": 1, "total": 7.13657958984375, "mean": 7.13657958984375, "stdev": 0.0, "p50": 7.13657958984375, "p90": 7.13657958984375, "p95": 7.13657958984375, "p99": 7.13657958984375, "values": [ 7.13657958984375 ] }, "throughput": null, "energy": null, "efficiency": null }, "forward": { "memory": { "unit": "MB", "max_ram": 1009.963008, "max_global_vram": 898.41664, "max_process_vram": 214640.04608, "max_reserved": 555.74528, "max_allocated": 499.5072 }, "latency": { "unit": "s", "count": 118, "total": 1.001474329948425, "mean": 0.008487070592783265, "stdev": 0.00018161653518041082, "p50": 0.008510077953338623, "p90": 0.008583806228637695, "p95": 0.008707757902145386, "p99": 0.008863461713790894, "values": [ 0.008439678192138672, 0.008294238090515136, 0.008230877876281737, 0.008288158416748046, 0.008247359275817871, 0.008218558311462402, 0.008151838302612305, 0.008372797966003417, 0.008154237747192383, 0.008134398460388183, 0.008139678001403809, 0.008100957870483398, 0.00807007884979248, 0.008053117752075196, 0.008053117752075196, 0.008095197677612305, 0.008094557762145996, 0.008592317581176758, 0.00857455825805664, 0.008562877655029296, 0.008475997924804687, 0.008568637847900391, 0.008711837768554687, 0.008829598426818847, 0.008870397567749023, 0.008773118019104005, 0.008672958374023437, 0.008582717895507812, 0.008500638961791993, 0.008471199035644531, 0.008499677658081056, 0.008475197792053222, 0.00847695827484131, 0.008493118286132813, 0.008480957984924316, 0.008502397537231445, 0.0084644775390625, 0.008466238021850587, 0.008450078010559083, 0.008504958152770995, 0.008444157600402832, 0.008463678359985351, 0.008553117752075194, 0.008561918258666992, 0.00853695774078369, 0.00850319766998291, 0.008547838211059571, 0.008526239395141601, 0.008529277801513671, 0.008469437599182129, 0.00849871826171875, 0.008582398414611817, 0.00855727767944336, 0.00852559757232666, 0.008539677619934082, 0.008553117752075194, 0.008531357765197754, 0.008526557922363282, 0.008463997840881348, 0.008440157890319825, 0.008485918045043945, 0.008507038116455078, 0.008479517936706543, 0.008540317535400391, 0.008431198120117188, 0.008557438850402832, 0.008559839248657226, 0.008516958236694336, 0.008563838005065917, 0.008572478294372559, 0.008567678451538085, 0.00858559799194336, 0.0085527982711792, 0.00858559799194336, 0.007921117782592774, 0.008485918045043945, 0.008520797729492187, 0.00846175765991211, 0.008542078018188477, 0.008537278175354004, 0.008546398162841798, 0.008562397956848145, 0.008524157524108887, 0.008543837547302245, 0.008520479202270509, 0.00949567699432373, 0.008707037925720215, 0.008518238067626954, 0.008511677742004394, 0.008498079299926757, 0.008490238189697265, 0.008539358139038085, 0.008443998336791993, 0.008438077926635743, 0.008468638420104981, 0.008494237899780273, 0.008475518226623536, 
0.008456478118896485, 0.0084446382522583, 0.008527358055114747, 0.00850479793548584, 0.008482077598571777, 0.008602397918701172, 0.008718398094177246, 0.008448958396911621, 0.008508478164672852, 0.008553438186645508, 0.008572157859802245, 0.008477438926696777, 0.008503837585449219, 0.008574077606201172, 0.008560957908630372, 0.008568317413330077, 0.00853855800628662, 0.00853759765625, 0.008551837921142579, 0.008583038330078126, 0.00851183795928955 ] }, "throughput": { "unit": "samples/s", "value": 117.82628517904884 }, "energy": null, "efficiency": null } } }