IlyasMoutawwakil (HF staff): Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub (commit 4180af5, verified)
{
    "config": {
        "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-classification",
            "library": "transformers",
            "model": "FacebookAI/roberta-base",
            "processor": "FacebookAI/roberta-base",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.236096,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 68702699520,
            "optimum_benchmark_version": "0.2.0",
            "optimum_benchmark_commit": "77e62a3eef699bce70248c56d35c703369938b85",
            "transformers_version": "4.40.2",
            "transformers_commit": null,
            "accelerate_version": "0.30.1",
            "accelerate_commit": null,
            "diffusers_version": "0.27.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.3",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1008.959488,
                "max_global_vram": 2522.836992,
                "max_process_vram": 226898.518016,
                "max_reserved": 555.74528,
                "max_allocated": 499.443712
            },
            "latency": {
                "unit": "s",
                "count": 139,
                "total": 0.9981516461372376,
                "mean": 0.0071809470945125,
                "stdev": 0.0003183605302359094,
                "p50": 0.007091347217559814,
                "p90": 0.007381938934326172,
                "p95": 0.007519570779800414,
                "p99": 0.007941451139450073,
                "values": [
                    0.007919826030731202,
                    0.007084307193756104,
                    0.007131347179412842,
                    0.007205427169799805,
                    0.007233266830444336,
                    0.007191027164459228,
                    0.007468945980072021,
                    0.007454866886138916,
                    0.007448145866394043,
                    0.007505746841430664,
                    0.00739422607421875,
                    0.007310067176818848,
                    0.007332306861877441,
                    0.0072875070571899415,
                    0.007303826808929444,
                    0.007191826820373535,
                    0.00722318696975708,
                    0.0071572670936584475,
                    0.0071598272323608395,
                    0.007091347217559814,
                    0.0070843081474304195,
                    0.007174868106842041,
                    0.007135988235473633,
                    0.007057907104492188,
                    0.007338547229766846,
                    0.007123188018798828,
                    0.007170707225799561,
                    0.007203987121582031,
                    0.00716206693649292,
                    0.007123666763305664,
                    0.0071027069091796876,
                    0.007140787124633789,
                    0.007090867042541504,
                    0.007150866985321045,
                    0.007123666763305664,
                    0.007299187183380127,
                    0.007139347076416016,
                    0.007074546813964844,
                    0.00709486722946167,
                    0.007080627918243408,
                    0.007022387981414795,
                    0.007189427852630615,
                    0.010238061904907227,
                    0.007643986225128174,
                    0.007075346946716308,
                    0.007070868015289306,
                    0.007056628227233887,
                    0.007053427219390869,
                    0.007045746803283692,
                    0.0070311870574951175,
                    0.006975828170776367,
                    0.007052146911621094,
                    0.00706318712234497,
                    0.007262227058410645,
                    0.007053906917572021,
                    0.007051348209381103,
                    0.007378867149353027,
                    0.007138707160949707,
                    0.007047347068786621,
                    0.007018708229064942,
                    0.007068787097930908,
                    0.006990386962890625,
                    0.007085906982421875,
                    0.007226867198944092,
                    0.007213266849517822,
                    0.007223027229309082,
                    0.0070721468925476075,
                    0.007887666225433349,
                    0.007074387073516846,
                    0.006992147922515869,
                    0.007117907047271729,
                    0.007012466907501221,
                    0.007091506958007813,
                    0.007059826850891114,
                    0.007017268180847168,
                    0.00707470703125,
                    0.007002226829528809,
                    0.007013906955718994,
                    0.007130387783050537,
                    0.007081266880035401,
                    0.006995666980743409,
                    0.007040787220001221,
                    0.007078708171844483,
                    0.007096628189086914,
                    0.007026226997375489,
                    0.0070630269050598144,
                    0.007061906814575195,
                    0.007101906776428223,
                    0.007059668064117432,
                    0.007103346824645996,
                    0.007100626945495605,
                    0.007048787117004395,
                    0.007042226791381836,
                    0.007047028064727783,
                    0.007106067180633545,
                    0.007049586772918701,
                    0.007069427013397217,
                    0.007051667213439941,
                    0.007094868183135986,
                    0.007091667175292969,
                    0.007081747055053711,
                    0.007106866836547852,
                    0.007089587211608886,
                    0.007068147182464599,
                    0.006969107151031494,
                    0.007127027988433838,
                    0.007092628002166748,
                    0.007051667213439941,
                    0.007000786781311035,
                    0.0069948678016662595,
                    0.007072627067565918,
                    0.0070790271759033204,
                    0.007019987106323242,
                    0.007085426807403565,
                    0.007112627029418945,
                    0.0070659079551696775,
                    0.007070547103881836,
                    0.00706270694732666,
                    0.007078707218170166,
                    0.0070316681861877445,
                    0.007048947811126709,
                    0.007074546813964844,
                    0.007846705913543701,
                    0.007954705238342285,
                    0.0077140660285949705,
                    0.007284467220306397,
                    0.007434707164764404,
                    0.007435986042022705,
                    0.007326547145843506,
                    0.007276947021484375,
                    0.007210066795349121,
                    0.0072801470756530765,
                    0.007296466827392578,
                    0.007345586776733398,
                    0.007026226997375489,
                    0.00707710599899292,
                    0.0069875078201293945,
                    0.007039506912231445,
                    0.0070275068283081055
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 139.2573969475663
            },
            "energy": null,
            "efficiency": null
        }
    }
}
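
The "config" block above (PyTorch backend, process launcher, inference scenario with latency and memory tracking) maps onto optimum-benchmark's Python API. Below is a minimal sketch of how an equivalent run could be launched; the class names (`Benchmark`, `BenchmarkConfig`, `PyTorchConfig`, `ProcessConfig`, `InferenceConfig`) and the `save_json` call follow the optimum-benchmark 0.2.0 README and are assumptions here, not taken from this file.

```python
# Minimal sketch (not from this file): launching a benchmark equivalent to the
# config above, assuming optimum-benchmark 0.2.0-era class names.
from optimum_benchmark import (
    Benchmark,
    BenchmarkConfig,
    InferenceConfig,
    ProcessConfig,
    PyTorchConfig,
)

if __name__ == "__main__":
    # Mirrors the "backend" block: CUDA device 0, randomly initialized weights.
    backend_config = PyTorchConfig(
        model="FacebookAI/roberta-base",
        device="cuda",
        device_ids="0",
        no_weights=True,
    )
    # Mirrors the "scenario" block: track latency and memory, skip energy.
    scenario_config = InferenceConfig(latency=True, memory=True, energy=False)
    # Mirrors the "launcher" block: isolated spawned process.
    launcher_config = ProcessConfig(device_isolation=True, start_method="spawn")

    benchmark_config = BenchmarkConfig(
        name="cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
        backend=backend_config,
        scenario=scenario_config,
        launcher=launcher_config,
    )

    # Runs the benchmark and returns a report object, which can be serialized
    # to JSON in the same shape as the "report" block above.
    report = Benchmark.launch(benchmark_config)
    report.save_json("benchmark_report.json")
```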
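The summary statistics in the "report" block are derived directly from the raw latency values: 139 forward calls took about 0.998 s in total, and with batch_size 1 the throughput is 139 / 0.9981516 ≈ 139.26 samples/s. The standard-library snippet below recomputes these figures from a local copy of the file; the file name "benchmark.json" is just an assumed local path.

```python
# Recompute the reported latency mean and throughput from the raw values.
import json
import statistics

# Assumed local copy of the benchmark file shown above.
with open("benchmark.json") as f:
    data = json.load(f)

values = data["report"]["forward"]["latency"]["values"]
batch_size = data["config"]["scenario"]["input_shapes"]["batch_size"]

total = sum(values)                             # ~0.998152 s
mean = statistics.mean(values)                  # ~0.007181 s per forward call
throughput = len(values) * batch_size / total   # ~139.26 samples/s

print(f"count={len(values)} total={total:.6f}s mean={mean:.6f}s "
      f"throughput={throughput:.2f} samples/s")
```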