IlyasMoutawwakil
Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
7e19a39
{
  "config": {
    "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
    "backend": {
      "name": "pytorch",
      "version": "2.2.2+rocm5.7",
      "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
      "task": "text-classification",
      "library": "transformers",
      "model": "FacebookAI/roberta-base",
      "processor": "FacebookAI/roberta-base",
      "device": "cuda",
      "device_ids": "0",
      "seed": 42,
      "inter_op_num_threads": null,
      "intra_op_num_threads": null,
      "model_kwargs": {},
      "processor_kwargs": {},
      "hub_kwargs": {},
      "no_weights": true,
      "device_map": null,
      "torch_dtype": null,
      "eval_mode": true,
      "to_bettertransformer": false,
      "low_cpu_mem_usage": null,
      "attn_implementation": null,
      "cache_implementation": null,
      "autocast_enabled": false,
      "autocast_dtype": null,
      "torch_compile": false,
      "torch_compile_target": "forward",
      "torch_compile_config": {},
      "quantization_scheme": null,
      "quantization_config": {},
      "deepspeed_inference": false,
      "deepspeed_inference_config": {},
      "peft_type": null,
      "peft_config": {}
    },
    "scenario": {
      "name": "inference",
      "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
      "iterations": 1,
      "duration": 1,
      "warmup_runs": 1,
      "input_shapes": {
        "batch_size": 1,
        "num_choices": 2,
        "sequence_length": 2
      },
      "new_tokens": null,
      "latency": true,
      "memory": true,
      "energy": false,
      "forward_kwargs": {},
      "generate_kwargs": {
        "max_new_tokens": 2,
        "min_new_tokens": 2
      },
      "call_kwargs": {
        "num_inference_steps": 2
      }
    },
    "launcher": {
      "name": "process",
      "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
      "device_isolation": true,
      "device_isolation_action": "error",
      "start_method": "spawn"
    },
    "environment": {
      "cpu": " AMD EPYC 7763 64-Core Processor",
      "cpu_count": 128,
      "cpu_ram_mb": 1082015.236096,
      "system": "Linux",
      "machine": "x86_64",
      "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
      "processor": "x86_64",
      "python_version": "3.10.12",
      "gpu": [
        "Advanced Micro Devices, Inc. [AMD/ATI]"
      ],
      "gpu_count": 1,
      "gpu_vram_mb": 68702699520,
      "optimum_benchmark_version": "0.2.0",
      "optimum_benchmark_commit": "6422f8f56820540ec4876445962a50a44754b92e",
      "transformers_version": "4.40.2",
      "transformers_commit": null,
      "accelerate_version": "0.30.1",
      "accelerate_commit": null,
      "diffusers_version": "0.27.2",
      "diffusers_commit": null,
      "optimum_version": null,
      "optimum_commit": null,
      "timm_version": "1.0.3",
      "timm_commit": null,
      "peft_version": null,
      "peft_commit": null
    }
  },
"report": { | |
"forward": { | |
"memory": { | |
"unit": "MB", | |
"max_ram": 1005.068288, | |
"max_global_vram": 897.150976, | |
"max_process_vram": 229173.878784, | |
"max_reserved": 555.74528, | |
"max_allocated": 499.443712 | |
}, | |
"latency": { | |
"unit": "s", | |
"count": 140, | |
"total": 0.9973588094711304, | |
"mean": 0.00712399149622236, | |
"stdev": 0.00039091738319720147, | |
"p50": 0.007005492448806763, | |
"p90": 0.007390914916992187, | |
"p95": 0.007509210062026977, | |
"p99": 0.0080148854637146, | |
"values": [ | |
          0.007476450920104981,
          0.00728461217880249,
          0.007161571979522705,
          0.00716461181640625,
          0.007187652111053467,
          0.007266851902008056,
          0.007401570796966552,
          0.007483010768890381,
          0.007448131084442138,
          0.007357090950012207,
          0.007330211162567138,
          0.007529729843139648,
          0.007314371109008789,
          0.007269251823425293,
          0.007213571071624756,
          0.007189251899719238,
          0.007199332237243652,
          0.007093572139739991,
          0.007112771987915039,
          0.00733341121673584,
          0.007139011859893799,
          0.007093252182006836,
          0.007048451900482177,
          0.007030693054199219,
          0.007064773082733154,
          0.007013092041015625,
          0.007079172134399414,
          0.007061091899871827,
          0.01094651699066162,
          0.007128771781921387,
          0.007151971817016602,
          0.007319651126861572,
          0.00715037202835083,
          0.006986853122711182,
          0.007015011787414551,
          0.007184452056884766,
          0.0070097317695617676,
          0.007056131839752198,
          0.0070054130554199215,
          0.00697021198272705,
          0.00690125322341919,
          0.0075081300735473635,
          0.007186532020568848,
          0.0069530930519104,
          0.006995013236999512,
          0.007029411792755127,
          0.007161411762237549,
          0.007389730930328369,
          0.006919972896575928,
          0.007108451843261719,
          0.006940453052520752,
          0.007199652194976806,
          0.007217092037200928,
          0.007191172122955322,
          0.0069532527923583986,
          0.007471331119537353,
          0.006978052139282227,
          0.006947012901306152,
          0.006928133010864258,
          0.006903653144836426,
          0.006897253036499023,
          0.007014052867889404,
          0.0069655728340148925,
          0.0069386930465698245,
          0.006880612850189209,
          0.006963013172149658,
          0.007005571842193603,
          0.007758369922637939,
          0.006948132038116455,
          0.0069887728691101075,
          0.0069670119285583495,
          0.007022852897644043,
          0.006964131832122803,
          0.006968293190002441,
          0.006951333045959473,
          0.006963653087615967,
          0.006962213039398193,
          0.006969411849975586,
          0.006970693111419677,
          0.006955812931060791,
          0.006959493160247803,
          0.00703773307800293,
          0.006945733070373535,
          0.0069606127738952635,
          0.006948771953582763,
          0.007014533042907715,
          0.007011971950531006,
          0.006959173202514648,
          0.006957413196563721,
          0.0069766130447387695,
          0.0070098919868469235,
          0.0069367728233337405,
          0.006895652770996094,
          0.00690733289718628,
          0.006977732181549072,
          0.00694445276260376,
          0.006929253101348877,
          0.006920773029327393,
          0.0069551730155944826,
          0.006995013236999512,
          0.006987651824951172,
          0.006945413112640381,
          0.006968451976776123,
          0.006937092781066895,
          0.007133572101593017,
          0.007038372039794922,
          0.006937092781066895,
          0.006956131935119629,
          0.006923973083496094,
          0.006964932918548584,
          0.006990052223205566,
          0.006927813053131103,
          0.006929412841796875,
          0.006942212104797363,
          0.007017252922058105,
          0.006936131954193115,
          0.006926053047180176,
          0.006952292919158935,
          0.006954691886901856,
          0.006955973148345947,
          0.0069673328399658205,
          0.006956131935119629,
          0.006976293087005615,
          0.0069812521934509274,
          0.00697709321975708,
          0.006949892997741699,
          0.007486530780792236,
          0.007869409084320069,
          0.008017568588256837,
          0.008010688781738282,
          0.007377571105957032,
          0.007334530830383301,
          0.007156452178955078,
          0.007107172012329101,
          0.006951013088226318,
          0.006994051933288574,
          0.007132931232452393,
          0.007749090194702149,
          0.007291652202606201,
          0.007293570995330811
        ]
      },
      "throughput": {
        "unit": "samples/s",
        "value": 140.37074588456068
      },
      "energy": null,
      "efficiency": null
    }
  }
}
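
The "config" section of the file above maps onto optimum-benchmark's configuration objects. As a hedged, minimal sketch (not part of the uploaded artifact), a comparable run might be launched from Python roughly as follows; the class names follow the library's documented top-level API, but exact signatures and defaults may differ from version 0.2.0, which produced this file.

# Hedged sketch: launching a run matching the "config" section above with the
# optimum-benchmark Python API. Signatures are assumed from the library's
# documented usage and may differ across versions.
from optimum_benchmark import (
    Benchmark,
    BenchmarkConfig,
    InferenceConfig,
    ProcessConfig,
    PyTorchConfig,
)

if __name__ == "__main__":
    backend_config = PyTorchConfig(
        model="FacebookAI/roberta-base",
        task="text-classification",
        device="cuda",
        device_ids="0",
        no_weights=True,
    )
    scenario_config = InferenceConfig(
        latency=True,
        memory=True,
        input_shapes={"batch_size": 1, "sequence_length": 2},
    )
    launcher_config = ProcessConfig(device_isolation=True, start_method="spawn")
    benchmark_config = BenchmarkConfig(
        name="cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
        backend=backend_config,
        scenario=scenario_config,
        launcher=launcher_config,
    )
    # Benchmark.launch runs the scenario in an isolated process and returns a
    # report whose structure mirrors the "report" section of the JSON above.
    report = Benchmark.launch(benchmark_config)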
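
For the "report" section, the stored latency aggregates can be recomputed from the raw "values" list with the Python standard library. The sketch below is not part of the benchmark artifact; the local file name benchmark.json is an assumption, and the percentile interpolation (linear between closest ranks) may not match the library's exact convention, so tiny deviations from the stored fields are expected.

# Hedged sketch: recompute latency statistics from the raw values and compare
# them with the summary fields stored in the report.
import json
import statistics

with open("benchmark.json") as f:  # hypothetical local copy of the file above
    data = json.load(f)

latency = data["report"]["forward"]["latency"]
values = latency["values"]
ordered = sorted(values)

def percentile(sorted_vals, q):
    # Linear interpolation between closest ranks; conventions differ slightly
    # between implementations, so small deviations are expected.
    idx = (len(sorted_vals) - 1) * q
    lo, hi = int(idx), min(int(idx) + 1, len(sorted_vals) - 1)
    frac = idx - lo
    return sorted_vals[lo] * (1 - frac) + sorted_vals[hi] * frac

print("count :", len(values), "vs", latency["count"])
print("total :", sum(values), "vs", latency["total"])
print("mean  :", statistics.mean(values), "vs", latency["mean"])
print("stdev :", statistics.stdev(values), "vs", latency["stdev"])
for q in (0.50, 0.90, 0.95, 0.99):
    key = f"p{round(q * 100)}"
    print(f"{key}   :", percentile(ordered, q), "vs", latency[key])

# Throughput is batch_size * count / total latency; batch_size is 1 here,
# which reproduces the stored value of ~140.37 samples/s.
print("throughput:", len(values) / latency["total"], "samples/s")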