{
    "config": {
        "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-classification",
            "library": "transformers",
            "model": "FacebookAI/roberta-base",
            "processor": "FacebookAI/roberta-base",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.236096,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 68702699520,
            "optimum_benchmark_version": "0.2.1",
            "optimum_benchmark_commit": "1ba65d84c92c93ea19ab2d2293c92bced987bae2",
            "transformers_version": "4.40.2",
            "transformers_commit": null,
            "accelerate_version": "0.30.1",
            "accelerate_commit": null,
            "diffusers_version": "0.27.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.3",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1007.04256,
                "max_global_vram": 1877.512192,
                "max_process_vram": 206404.93568,
                "max_reserved": 555.74528,
                "max_allocated": 499.443712
            },
            "latency": {
                "unit": "s",
                "count": 143,
                "total": 0.9973802847862243,
                "mean": 0.006974687306197372,
                "stdev": 0.0003435063186580796,
                "p50": 0.006848495006561279,
                "p90": 0.007266864109039307,
                "p95": 0.007457711839675903,
                "p99": 0.007813777008056643,
                "values": [
                    0.00751889705657959,
                    0.006826254844665527,
                    0.006876655101776123,
                    0.006906575202941895,
                    0.007042736053466797,
                    0.007016654968261719,
                    0.007013296127319336,
                    0.00699969482421875,
                    0.0069808149337768554,
                    0.007365776062011719,
                    0.007457935810089111,
                    0.007478257179260254,
                    0.006794575214385986,
                    0.006983055114746094,
                    0.006824975967407227,
                    0.006803854942321778,
                    0.006821455001831055,
                    0.0067857751846313475,
                    0.0068358550071716305,
                    0.006910895824432373,
                    0.006834255218505859,
                    0.006824655055999756,
                    0.006830094814300537,
                    0.006821455955505371,
                    0.006850094795227051,
                    0.006828975200653076,
                    0.00694289493560791,
                    0.006886254787445068,
                    0.0069398550987243655,
                    0.006880815982818604,
                    0.006838415145874023,
                    0.006859535217285156,
                    0.006817295074462891,
                    0.006870255947113037,
                    0.006896814823150635,
                    0.00686369514465332,
                    0.006877935886383056,
                    0.00683265495300293,
                    0.006842895030975342,
                    0.006914415836334229,
                    0.006864335060119629,
                    0.006853936195373535,
                    0.01009538173675537,
                    0.007142896175384521,
                    0.006834095001220703,
                    0.0067979350090026855,
                    0.006827855110168457,
                    0.0067692952156066894,
                    0.006753775119781494,
                    0.006806735038757324,
                    0.006790734767913818,
                    0.0067550549507141115,
                    0.0067960147857666015,
                    0.006794895172119141,
                    0.006780334949493408,
                    0.006794095039367676,
                    0.006784814834594727,
                    0.0068241748809814455,
                    0.006816655158996582,
                    0.006979536056518554,
                    0.006956494808197021,
                    0.006831854820251465,
                    0.00682833480834961,
                    0.007455696105957031,
                    0.007697617053985596,
                    0.00727345609664917,
                    0.0073891358375549315,
                    0.007891057014465332,
                    0.007113134860992432,
                    0.00702129602432251,
                    0.006848495006561279,
                    0.006849615097045898,
                    0.006798415184020996,
                    0.0067814550399780274,
                    0.006829615116119385,
                    0.006831854820251465,
                    0.006825934886932373,
                    0.006817135810852051,
                    0.006875535011291504,
                    0.006781614780426025,
                    0.007023056030273437,
                    0.007144976139068604,
                    0.006818574905395508,
                    0.006849454879760742,
                    0.006794095039367676,
                    0.006810735225677491,
                    0.006810736179351806,
                    0.0068051347732543946,
                    0.006818254947662353,
                    0.006826254844665527,
                    0.006794895172119141,
                    0.006827694892883301,
                    0.006824655055999756,
                    0.006777295112609863,
                    0.006788014888763428,
                    0.006772494792938232,
                    0.006784814834594727,
                    0.006872014999389648,
                    0.0067814550399780274,
                    0.0068022551536560056,
                    0.006811375141143799,
                    0.006781774997711182,
                    0.006818894863128662,
                    0.0068737750053405764,
                    0.00674961519241333,
                    0.006807855129241943,
                    0.007411375999450683,
                    0.007036974906921387,
                    0.007015376091003418,
                    0.007113455772399902,
                    0.007189455986022949,
                    0.0072699360847473145,
                    0.007013615131378174,
                    0.007164814949035645,
                    0.007312335968017578,
                    0.0072465758323669435,
                    0.007146736145019531,
                    0.006796335220336914,
                    0.00679393482208252,
                    0.006803695201873779,
                    0.006842095851898194,
                    0.006787535190582276,
                    0.006836335182189942,
                    0.007707056999206543,
                    0.007636016845703125,
                    0.007254576206207276,
                    0.007150576114654541,
                    0.007091216087341309,
                    0.0071849761009216305,
                    0.0071628961563110355,
                    0.007069615840911865,
                    0.007068815231323242,
                    0.007155535221099854,
                    0.007250576972961426,
                    0.007209296226501465,
                    0.007159535884857178,
                    0.006837454795837403,
                    0.006807535171508789,
                    0.006856494903564453,
                    0.006827054977416992,
                    0.006834414958953858,
                    0.006855693817138672,
                    0.006852654933929443
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 143.37560324911598
            },
            "energy": null,
            "efficiency": null
        }
    }
}