{
"config": {
"name": "cuda_inference_transformers_token-classification_microsoft/deberta-v3-base",
"backend": {
"name": "pytorch",
"version": "2.2.2+rocm5.7",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "token-classification",
"library": "transformers",
"model": "microsoft/deberta-v3-base",
"processor": "microsoft/deberta-v3-base",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"hub_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": null,
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": null,
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 1,
"duration": 1,
"warmup_runs": 1,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 2
},
"new_tokens": null,
"latency": true,
"memory": true,
"energy": false,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 2,
"min_new_tokens": 2
},
"call_kwargs": {
"num_inference_steps": 2
}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "error",
"numactl": false,
"numactl_kwargs": {},
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7763 64-Core Processor",
"cpu_count": 128,
"cpu_ram_mb": 1082015.236096,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
"processor": "x86_64",
"python_version": "3.10.12",
"gpu": [
"Advanced Micro Devices, Inc. [AMD/ATI]"
],
"gpu_count": 1,
"gpu_vram_mb": 68702699520,
"optimum_benchmark_version": "0.2.1",
"optimum_benchmark_commit": "c594845efb520077430f4fe3f536bc1756e2b290",
"transformers_version": "4.42.3",
"transformers_commit": null,
"accelerate_version": "0.31.0",
"accelerate_commit": null,
"diffusers_version": "0.29.2",
"diffusers_commit": null,
"optimum_version": null,
"optimum_commit": null,
"timm_version": "1.0.7",
"timm_commit": null,
"peft_version": null,
"peft_commit": null
}
},
"report": {
"forward": {
"memory": {
"unit": "MB",
"max_ram": 1027.989504,
"max_global_vram": 1122.861056,
"max_process_vram": 230388.92032,
"max_reserved": 773.849088,
"max_allocated": 745.087488
},
"latency": {
"unit": "s",
"count": 73,
"total": 0.9966716995239261,
"mean": 0.013653036979779805,
"stdev": 0.0007702390262521916,
"p50": 0.013402519226074218,
"p90": 0.014918194389343262,
"p95": 0.015222482681274413,
"p99": 0.015808906173706058,
"values": [
0.014950034141540527,
0.015393074035644532,
0.015108755111694336,
0.01544011402130127,
0.015420754432678223,
0.015090675354003906,
0.014664435386657714,
0.014511795997619628,
0.0147908353805542,
0.014112756729125977,
0.013835798263549805,
0.013993396759033203,
0.013881558418273926,
0.013745718002319336,
0.013756598472595215,
0.01369451904296875,
0.01379611873626709,
0.013701719284057617,
0.014248757362365723,
0.013920277595520019,
0.01675722885131836,
0.013791478157043457,
0.013402519226074218,
0.01305403995513916,
0.013264439582824706,
0.01302828025817871,
0.013303799629211426,
0.013056920051574708,
0.013544919013977051,
0.01325771999359131,
0.013041879653930664,
0.013009401321411132,
0.013227479934692383,
0.013080439567565917,
0.013026680946350098,
0.013414519309997559,
0.012973560333251952,
0.012987160682678222,
0.013378838539123535,
0.012998041152954101,
0.013001720428466796,
0.013438678741455079,
0.013034681320190429,
0.012991800308227539,
0.013433399200439454,
0.013047321319580078,
0.01303339958190918,
0.0134679594039917,
0.013059639930725097,
0.013094520568847657,
0.01497531509399414,
0.014659475326538086,
0.013746519088745118,
0.014140756607055664,
0.013616598129272461,
0.013218199729919434,
0.013154199600219726,
0.013076760292053223,
0.013583958625793457,
0.01316331958770752,
0.013132280349731445,
0.013074040412902832,
0.013099800109863282,
0.013638198852539063,
0.013154840469360352,
0.013215640068054198,
0.013049880027770995,
0.013165399551391602,
0.013699638366699218,
0.013069721221923828,
0.013079639434814452,
0.013107480049133301,
0.013589879035949708
]
},
"throughput": {
"unit": "samples/s",
"value": 73.24377729885325
},
"energy": null,
"efficiency": null
}
}
}