Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
{
  "config": {
    "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
    "backend": {
      "name": "pytorch",
      "version": "2.2.2+rocm5.7",
      "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
      "task": "text-classification",
      "library": "transformers",
      "model": "FacebookAI/roberta-base",
      "processor": "FacebookAI/roberta-base",
      "device": "cuda",
      "device_ids": "0",
      "seed": 42,
      "inter_op_num_threads": null,
      "intra_op_num_threads": null,
      "model_kwargs": {},
      "processor_kwargs": {},
      "hub_kwargs": {},
      "no_weights": true,
      "device_map": null,
      "torch_dtype": null,
      "eval_mode": true,
      "to_bettertransformer": false,
      "low_cpu_mem_usage": null,
      "attn_implementation": null,
      "cache_implementation": null,
      "autocast_enabled": false,
      "autocast_dtype": null,
      "torch_compile": false,
      "torch_compile_target": "forward",
      "torch_compile_config": {},
      "quantization_scheme": null,
      "quantization_config": {},
      "deepspeed_inference": false,
      "deepspeed_inference_config": {},
      "peft_type": null,
      "peft_config": {}
    },
    "scenario": {
      "name": "inference",
      "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
      "iterations": 1,
      "duration": 1,
      "warmup_runs": 1,
      "input_shapes": {
        "batch_size": 1,
        "num_choices": 2,
        "sequence_length": 2
      },
      "new_tokens": null,
      "latency": true,
      "memory": true,
      "energy": false,
      "forward_kwargs": {},
      "generate_kwargs": {
        "max_new_tokens": 2,
        "min_new_tokens": 2
      },
      "call_kwargs": {
        "num_inference_steps": 2
      }
    },
    "launcher": {
      "name": "process",
      "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
      "device_isolation": true,
      "device_isolation_action": "error",
      "start_method": "spawn"
    },
    "environment": {
      "cpu": " AMD EPYC 7763 64-Core Processor",
      "cpu_count": 128,
      "cpu_ram_mb": 1082015.236096,
      "system": "Linux",
      "machine": "x86_64",
      "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
      "processor": "x86_64",
      "python_version": "3.10.12",
      "gpu": [
        "Advanced Micro Devices, Inc. [AMD/ATI]"
      ],
      "gpu_count": 1,
      "gpu_vram_mb": 68702699520,
      "optimum_benchmark_version": "0.2.1",
      "optimum_benchmark_commit": "c1d0b062e90b79e7705510c58cea731c0d90da8a",
      "transformers_version": "4.40.2",
      "transformers_commit": null,
      "accelerate_version": "0.30.1",
      "accelerate_commit": null,
      "diffusers_version": "0.27.2",
      "diffusers_commit": null,
      "optimum_version": null,
      "optimum_commit": null,
      "timm_version": "1.0.3",
      "timm_commit": null,
      "peft_version": null,
      "peft_commit": null
    }
  },
"report": { | |
"forward": { | |
"memory": { | |
"unit": "MB", | |
"max_ram": 1007.521792, | |
"max_global_vram": 1877.528576, | |
"max_process_vram": 218551.11168, | |
"max_reserved": 555.74528, | |
"max_allocated": 499.443712 | |
}, | |
"latency": { | |
"unit": "s", | |
"count": 142, | |
"total": 1.0005787944793698, | |
"mean": 0.007046329538587113, | |
"stdev": 0.00039151896728983914, | |
"p50": 0.00687505030632019, | |
"p90": 0.007406860017776489, | |
"p95": 0.007723324108123779, | |
"p99": 0.007985287923812867, | |
"values": [ | |
0.007363851070404053, | |
0.007090570926666259, | |
0.006868330001831055, | |
0.006923050880432129, | |
0.006986090183258057, | |
0.007040491104125976, | |
0.007104811191558838, | |
0.007864171981811524, | |
0.007149291038513183, | |
0.006880650043487549, | |
0.006814091205596924, | |
0.006873770236968994, | |
0.006804649829864502, | |
0.0068097710609436035, | |
0.006837130069732666, | |
0.006851050853729248, | |
0.00681024980545044, | |
0.006829609870910644, | |
0.006825770854949951, | |
0.0069089698791503906, | |
0.006865931034088135, | |
0.00715329122543335, | |
0.00687185001373291, | |
0.00683937120437622, | |
0.006805449962615967, | |
0.006867051124572754, | |
0.00685984992980957, | |
0.0068337697982788085, | |
0.0068564901351928715, | |
0.006821290016174316, | |
0.006875210762023925, | |
0.006862410068511963, | |
0.006857611179351806, | |
0.01033937644958496, | |
0.0070460901260375975, | |
0.00686561107635498, | |
0.006893610000610352, | |
0.006855210781097412, | |
0.0068713698387146, | |
0.006892331123352051, | |
0.006846889972686767, | |
0.006884171009063721, | |
0.006874889850616455, | |
0.006896811008453369, | |
0.006841450214385986, | |
0.006842250823974609, | |
0.007042570114135742, | |
0.007026090145111084, | |
0.006915690898895263, | |
0.00684896993637085, | |
0.006865931034088135, | |
0.0069995298385620115, | |
0.007043530941009521, | |
0.006852170944213867, | |
0.006865610122680664, | |
0.006989931106567383, | |
0.006868490219116211, | |
0.006874250888824463, | |
0.00708096981048584, | |
0.006848810195922851, | |
0.006884170055389404, | |
0.006863211154937744, | |
0.006852009773254395, | |
0.006865931034088135, | |
0.006836329936981201, | |
0.006867691040039063, | |
0.006884809970855713, | |
0.008069453239440917, | |
0.007161769866943359, | |
0.006880169868469239, | |
0.0068920102119445804, | |
0.00687952995300293, | |
0.006890250205993652, | |
0.0068736100196838375, | |
0.0068540902137756345, | |
0.006865291118621826, | |
0.006835849761962891, | |
0.006830410957336426, | |
0.006829450130462647, | |
0.00682801103591919, | |
0.006812170028686524, | |
0.006885449886322021, | |
0.006837130069732666, | |
0.006874569892883301, | |
0.006823050975799561, | |
0.006833610057830811, | |
0.006848011016845703, | |
0.006820489883422851, | |
0.006834731101989746, | |
0.006807370185852051, | |
0.006839849948883057, | |
0.007132330894470215, | |
0.007408172130584717, | |
0.006842249870300293, | |
0.0068411297798156735, | |
0.006966570854187012, | |
0.006834090232849121, | |
0.006943690776824951, | |
0.007074891090393066, | |
0.006836170196533203, | |
0.007341771125793457, | |
0.0071672101020812985, | |
0.0077656121253967285, | |
0.007544651985168457, | |
0.007724172115325928, | |
0.007698250770568848, | |
0.00775953197479248, | |
0.0077251319885253905, | |
0.007707211971282959, | |
0.0076921720504760744, | |
0.007641770839691162, | |
0.0073950510025024415, | |
0.007345130920410156, | |
0.007360011100769043, | |
0.00722321081161499, | |
0.007112811088562012, | |
0.007185131072998047, | |
0.006862090110778808, | |
0.006842570781707764, | |
0.007117129802703857, | |
0.007697930812835693, | |
0.007738892078399658, | |
0.007304811000823975, | |
0.0072832112312316896, | |
0.007246571063995361, | |
0.007186730861663818, | |
0.007173291206359863, | |
0.007230250835418701, | |
0.007254251003265381, | |
0.007242890834808349, | |
0.007352972030639648, | |
0.007103690147399902, | |
0.0068430500030517575, | |
0.006844328880310059, | |
0.006855210781097412, | |
0.006871530055999756, | |
0.006885770797729492, | |
0.006861609935760498, | |
0.0068617711067199706, | |
0.006888649940490723, | |
0.006852810859680176, | |
0.006869609832763672 | |
] | |
}, | |
"throughput": { | |
"unit": "samples/s", | |
"value": 141.91785872684488 | |
}, | |
"energy": null, | |
"efficiency": null | |
} | |
} | |
} |
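
The aggregates under `report.forward` (count, total, mean, stdev, throughput) are derivable from the raw `values` list above. Below is a minimal sketch, not part of the uploaded file, showing one way to reload the JSON and recompute those figures with the standard library; the local path `benchmark.json` is an assumption, and `statistics.stdev` (sample standard deviation) will only approximately match the reported `stdev`.

```python
# Sketch: reload the benchmark file and recompute the latency aggregates
# from the per-call measurements. The path "benchmark.json" is assumed.
import json
import statistics

with open("benchmark.json") as f:
    benchmark = json.load(f)

latency = benchmark["report"]["forward"]["latency"]
values = latency["values"]  # one entry per forward call, in seconds

count = len(values)               # should match latency["count"] (142)
total = sum(values)               # should match latency["total"]
mean = total / count              # should match latency["mean"]
stdev = statistics.stdev(values)  # sample stdev, close to latency["stdev"]
throughput = count / total        # samples/s, close to the reported throughput value

print(f"count={count}  total={total:.6f} s  mean={mean * 1e3:.3f} ms")
print(f"stdev={stdev * 1e6:.1f} us  throughput={throughput:.2f} samples/s")
```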