Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
{
    "config": {
        "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-classification",
            "library": "transformers",
            "model": "FacebookAI/roberta-base",
            "processor": "FacebookAI/roberta-base",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.236096,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 68702699520,
            "optimum_benchmark_version": "0.2.1",
            "optimum_benchmark_commit": "88e73025c13ec1a964328bd3aa8fa3b1b8ab2582",
            "transformers_version": "4.42.3",
            "transformers_commit": null,
            "accelerate_version": "0.31.0",
            "accelerate_commit": null,
            "diffusers_version": "0.29.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.7",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1007.894528,
                "max_global_vram": 897.142784,
                "max_process_vram": 198816.514048,
                "max_reserved": 555.74528,
                "max_allocated": 499.443712
            },
            "latency": {
                "unit": "s",
                "count": 143,
                "total": 0.9981015076637272,
                "mean": 0.006979730822823265,
                "stdev": 0.00034644343621254616,
                "p50": 0.006878238201141357,
                "p90": 0.007236477851867676,
                "p95": 0.007412461805343628,
                "p99": 0.0077717642593383785,
                "values": [
                    0.007600158214569092,
                    0.0069198389053344725,
                    0.006952638149261474,
                    0.006974557876586914,
                    0.006959197998046875,
                    0.006963199138641358,
                    0.006952797889709473,
                    0.006937277793884277,
                    0.006928318023681641,
                    0.006866559028625489,
                    0.00683279800415039,
                    0.006811198234558106,
                    0.0067891178131103515,
                    0.006766398906707763,
                    0.0067483181953430175,
                    0.006786878108978271,
                    0.0068081579208374024,
                    0.006783998966217041,
                    0.006784158229827881,
                    0.006823517799377441,
                    0.006831199169158936,
                    0.00677711820602417,
                    0.006772957801818848,
                    0.0068099179267883305,
                    0.006819358825683594,
                    0.00680767822265625,
                    0.006854877948760986,
                    0.006857277870178223,
                    0.0068110389709472655,
                    0.006806717872619629,
                    0.006791197776794434,
                    0.006767038822174072,
                    0.006783998012542725,
                    0.0067671980857849125,
                    0.006755358219146729,
                    0.006764638900756836,
                    0.006767518043518066,
                    0.006799997806549073,
                    0.006899038791656494,
                    0.00696991777420044,
                    0.006956478118896484,
                    0.006844158172607422,
                    0.006887518882751465,
                    0.007008159160614013,
                    0.006959197998046875,
                    0.010253597259521484,
                    0.0071119990348815915,
                    0.006904637813568116,
                    0.006915038108825684,
                    0.006889758110046387,
                    0.007080479145050049,
                    0.007083998203277588,
                    0.0072198381423950195,
                    0.006928318023681641,
                    0.006860797882080078,
                    0.006905599117279053,
                    0.007417597770690918,
                    0.007192957878112793,
                    0.007175518035888672,
                    0.006891997814178466,
                    0.00689263916015625,
                    0.006890398025512695,
                    0.00707631778717041,
                    0.006880477905273437,
                    0.006893918991088867,
                    0.006860958099365234,
                    0.006895197868347168,
                    0.006857597827911377,
                    0.006881278991699219,
                    0.0068547182083129885,
                    0.0069065580368042,
                    0.006884478092193604,
                    0.00688063907623291,
                    0.006860477924346924,
                    0.0070588779449462895,
                    0.0068937578201293944,
                    0.0068611187934875485,
                    0.006842557907104492,
                    0.0068937578201293944,
                    0.006888638019561768,
                    0.00689215898513794,
                    0.006849118232727051,
                    0.006855517864227295,
                    0.006878238201141357,
                    0.006861439228057861,
                    0.0068761582374572755,
                    0.006854238033294678,
                    0.006850557804107666,
                    0.006865118980407715,
                    0.006824957847595215,
                    0.006870398044586182,
                    0.006856159210205078,
                    0.006854557991027832,
                    0.006841598033905029,
                    0.006831198215484619,
                    0.006874878883361816,
                    0.00686671781539917,
                    0.00699663782119751,
                    0.006847198009490967,
                    0.006850399017333984,
                    0.007533277988433838,
                    0.007746717929840088,
                    0.007341757774353027,
                    0.007366238117218017,
                    0.007213438034057617,
                    0.00720239782333374,
                    0.007208958148956299,
                    0.00731903886795044,
                    0.0077726378440856935,
                    0.007716157913208008,
                    0.007770557880401611,
                    0.0073403182029724125,
                    0.007143837928771973,
                    0.007053438186645508,
                    0.007202717781066894,
                    0.007260798931121826,
                    0.007203197956085205,
                    0.007280158042907715,
                    0.007141757965087891,
                    0.007151998043060302,
                    0.00724063777923584,
                    0.006874238967895508,
                    0.006851037979125976,
                    0.006844637870788574,
                    0.006862237930297852,
                    0.006838239192962646,
                    0.006834397792816162,
                    0.0068430380821228026,
                    0.006847359180450439,
                    0.006824478149414063,
                    0.006858878135681153,
                    0.006855517864227295,
                    0.0069198389053344725,
                    0.006895037174224854,
                    0.0068692779541015625,
                    0.006848958015441895,
                    0.006852639198303223,
                    0.006853598117828369,
                    0.006835038185119629,
                    0.006908798217773437,
                    0.00688063907623291,
                    0.006851677894592285,
                    0.006899518966674805
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 143.27200079551278
            },
            "energy": null,
            "efficiency": null
        }
    }
}
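
For reference, a minimal sketch of how this report could be inspected after downloading it. The local file name "benchmark.json" and the use of Python's standard json module are assumptions for illustration; only keys that appear in the report above are read.

# Minimal sketch (assumption: the JSON above is saved locally as "benchmark.json").
import json

with open("benchmark.json") as f:
    benchmark = json.load(f)

forward = benchmark["report"]["forward"]
latency = forward["latency"]   # per-iteration forward latencies, in seconds
memory = forward["memory"]     # peak memory figures, in MB

print("model:                  ", benchmark["config"]["backend"]["model"])
print("iterations:             ", latency["count"])
print("mean latency (s):       ", round(latency["mean"], 6))
print("p99 latency (s):        ", round(latency["p99"], 6))
print("throughput (samples/s): ", round(forward["throughput"]["value"], 2))
print("max allocated VRAM (MB):", memory["max_allocated"])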