{
"config": {
"name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
"backend": {
"name": "pytorch",
"version": "2.2.2+rocm5.7",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-classification",
"library": "transformers",
"model_type": "roberta",
"model": "FacebookAI/roberta-base",
"processor": "FacebookAI/roberta-base",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"hub_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": null,
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": null,
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 1,
"duration": 1,
"warmup_runs": 1,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 2
},
"new_tokens": null,
"latency": true,
"memory": true,
"energy": false,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 2,
"min_new_tokens": 2
},
"call_kwargs": {
"num_inference_steps": 2
}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "error",
"numactl": false,
"numactl_kwargs": {},
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7763 64-Core Processor",
"cpu_count": 128,
"cpu_ram_mb": 1082015.236096,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
"processor": "x86_64",
"python_version": "3.10.12",
"gpu": [
"Advanced Micro Devices, Inc. [AMD/ATI]"
],
"gpu_count": 1,
"gpu_vram_mb": 68702699520,
"optimum_benchmark_version": "0.3.1",
"optimum_benchmark_commit": "2a33a472f309c43b5bd16946ef9cec843d02f70a",
"transformers_version": "4.42.4",
"transformers_commit": null,
"accelerate_version": "0.32.1",
"accelerate_commit": null,
"diffusers_version": "0.29.2",
"diffusers_commit": null,
"optimum_version": null,
"optimum_commit": null,
"timm_version": "1.0.7",
"timm_commit": null,
"peft_version": null,
"peft_commit": null
}
},
"report": {
"forward": {
"memory": {
"unit": "MB",
"max_ram": 1013.972992,
"max_global_vram": 897.1264,
"max_process_vram": 226131.140608,
"max_reserved": 555.74528,
"max_allocated": 499.443712
},
"latency": {
"unit": "s",
"count": 124,
"total": 0.9951000232696534,
"mean": 0.008025000187658494,
"stdev": 0.0005216565894527329,
"p50": 0.008109346389770507,
"p90": 0.00851576099395752,
"p95": 0.008899167919158936,
"p99": 0.009389333562850953,
"values": [
0.008382545471191407,
0.007371828079223632,
0.0074110269546508786,
0.007348146915435791,
0.007562547206878662,
0.007298388004302978,
0.007210227012634277,
0.007347987174987793,
0.007173908233642578,
0.007261587142944336,
0.007470867156982422,
0.0072593469619750975,
0.0072407879829406736,
0.007244466781616211,
0.007186707019805908,
0.007229588031768799,
0.00741374683380127,
0.007205586910247802,
0.007135188102722168,
0.00718590784072876,
0.007205586910247802,
0.007211187839508057,
0.0072311868667602535,
0.0072379069328308105,
0.007260628223419189,
0.007301267147064209,
0.007251826763153077,
0.007552786827087403,
0.007260306835174561,
0.007647507190704346,
0.008917743682861329,
0.009409744262695313,
0.009391983032226563,
0.009380463600158692,
0.009327982902526856,
0.009315823554992676,
0.00908414363861084,
0.00879390525817871,
0.0086372652053833,
0.008684624671936035,
0.008612785339355469,
0.008426384925842285,
0.008317264556884765,
0.00838126564025879,
0.008260464668273926,
0.00819822597503662,
0.008194066047668458,
0.008174864768981934,
0.008377426147460938,
0.008229265213012696,
0.008236145973205567,
0.00820782470703125,
0.008150225639343261,
0.008151185989379884,
0.008164786338806152,
0.008280945777893066,
0.00847630500793457,
0.008537264823913574,
0.008423344612121582,
0.008339345932006836,
0.008350065231323242,
0.008498064994812012,
0.008339345932006836,
0.008523344993591309,
0.008358224868774414,
0.008336465835571289,
0.00830462646484375,
0.008250865936279297,
0.008227346420288086,
0.00819278621673584,
0.008215024948120117,
0.00811374568939209,
0.008134705543518066,
0.008139025688171387,
0.008166544914245606,
0.008196625709533691,
0.008102545738220215,
0.008135186195373536,
0.008206544876098633,
0.008112146377563476,
0.008131985664367675,
0.008308944702148437,
0.00845230484008789,
0.008393905639648437,
0.008197745323181153,
0.007484306812286377,
0.007596947193145752,
0.007671826839447021,
0.007418868064880371,
0.007548787117004394,
0.00729390811920166,
0.00779198694229126,
0.008095986366271972,
0.008088306427001953,
0.008095344543457032,
0.008141105651855468,
0.008106546401977539,
0.008195506095886231,
0.008199344635009766,
0.008197746276855468,
0.008207825660705567,
0.00809230613708496,
0.00811902618408203,
0.008131985664367675,
0.008047506332397461,
0.00808686637878418,
0.008083505630493164,
0.008035825729370118,
0.007983985900878907,
0.007979186058044433,
0.008089426040649415,
0.00805534553527832,
0.008094386100769043,
0.008035506248474121,
0.008059024810791016,
0.008057106018066406,
0.008025105476379395,
0.008065265655517578,
0.008047825813293457,
0.008079826354980469,
0.008080306053161622,
0.008072786331176758,
0.008069106101989746,
0.008002705574035644
]
},
"throughput": {
"unit": "samples/s",
"value": 124.61058898638811
},
"energy": null,
"efficiency": null
}
}
}
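
A minimal Python sketch of how the reported numbers fit together, assuming the JSON above is saved locally as "benchmark.json" (the filename and local path are assumptions, not part of the report): it reloads the forward-pass latency samples and recomputes the throughput, which should reproduce the reported ~124.61 samples/s because the scenario uses batch_size 1.

    # Minimal sketch (assumed local file "benchmark.json"): recompute the
    # forward throughput from the raw latency samples in the report above.
    import json
    import statistics

    with open("benchmark.json") as f:
        data = json.load(f)

    latency = data["report"]["forward"]["latency"]
    values = latency["values"]

    count = len(values)        # should match latency["count"] (124)
    total = sum(values)        # should match latency["total"] (seconds)
    mean_ms = statistics.mean(values) * 1e3

    # batch_size is 1 in config["scenario"]["input_shapes"], so
    # samples/s is simply iterations per second.
    throughput = count / total

    print(f"count={count}, total={total:.4f} s, mean={mean_ms:.3f} ms")
    print(f"throughput ~= {throughput:.2f} samples/s")  # ~124.61 in this report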