{
    "config": {
        "name": "cuda_inference_transformers_text-generation_openai-community/gpt2",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-generation",
            "library": "transformers",
            "model": "openai-community/gpt2",
            "processor": "openai-community/gpt2",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.236096,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 68702699520,
            "optimum_benchmark_version": "0.3.0",
            "optimum_benchmark_commit": "2a75c0bc0d007cc875fa0f75ca41d02e46f917be",
            "transformers_version": "4.42.3",
            "transformers_commit": null,
            "accelerate_version": "0.31.0",
            "accelerate_commit": null,
            "diffusers_version": "0.29.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.7",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "prefill": {
            "memory": {
                "unit": "MB",
                "max_ram": 1048.9856,
                "max_global_vram": 1107.542016,
                "max_process_vram": 242311.254016,
                "max_reserved": 725.614592,
                "max_allocated": 513.035776
            },
            "latency": {
                "unit": "s",
                "count": 61,
                "total": 0.5013564329147339,
                "mean": 0.008218957916634979,
                "stdev": 0.0005692078366503006,
                "p50": 0.007973114013671874,
                "p90": 0.008567033767700195,
                "p95": 0.00867231273651123,
                "p99": 0.010879223442077635,
                "values": [
                    0.010654711723327636,
                    0.008580154418945312,
                    0.00867231273651123,
                    0.008499194145202636,
                    0.0083071928024292,
                    0.008196634292602539,
                    0.00825887393951416,
                    0.00828719425201416,
                    0.008308794021606445,
                    0.008343354225158691,
                    0.008477912902832032,
                    0.008630073547363282,
                    0.008447994232177734,
                    0.008513593673706054,
                    0.008434873580932617,
                    0.007897274017333984,
                    0.007973595142364502,
                    0.007887514114379882,
                    0.007918394088745118,
                    0.007972474098205567,
                    0.007853435039520264,
                    0.007888154029846191,
                    0.007903674125671386,
                    0.007902394771575928,
                    0.011215991020202636,
                    0.0083025541305542,
                    0.007987675189971923,
                    0.007911034107208252,
                    0.007886874198913573,
                    0.007914714813232422,
                    0.007912633895874023,
                    0.007922073841094971,
                    0.007893754005432128,
                    0.00788639497756958,
                    0.007973114013671874,
                    0.007924953937530517,
                    0.00793007516860962,
                    0.007887993812561035,
                    0.00790735387802124,
                    0.007929113864898682,
                    0.008885594367980957,
                    0.008327033996582031,
                    0.008274873733520509,
                    0.008441912651062011,
                    0.008410874366760255,
                    0.008567033767700195,
                    0.008473113059997558,
                    0.008494394302368164,
                    0.00808575439453125,
                    0.007954073905944823,
                    0.008048153877258301,
                    0.007875675201416016,
                    0.007927673816680908,
                    0.007939673900604249,
                    0.007929594039916991,
                    0.00789695405960083,
                    0.007922073841094971,
                    0.007915194988250732,
                    0.007883513927459717,
                    0.007914073944091797,
                    0.00799311399459839
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 243.33985163156106
            },
            "energy": null,
            "efficiency": null
        },
        "decode": {
            "memory": {
                "unit": "MB",
                "max_ram": 1048.9856,
                "max_global_vram": 1107.53792,
                "max_process_vram": 224864.817152,
                "max_reserved": 725.614592,
                "max_allocated": 513.25952
            },
            "latency": {
                "unit": "s",
                "count": 61,
                "total": 0.5015626688003539,
                "mean": 0.008222338832792688,
                "stdev": 0.00032212106557148805,
                "p50": 0.008111353874206542,
                "p90": 0.00858575439453125,
                "p95": 0.008656953811645508,
                "p99": 0.009422968864440918,
                "values": [
                    0.009330872535705567,
                    0.008569913864135742,
                    0.008504794120788575,
                    0.0083887939453125,
                    0.008323034286499024,
                    0.008065274238586426,
                    0.008352793693542481,
                    0.008150394439697266,
                    0.008215033531188964,
                    0.00852639389038086,
                    0.008404793739318848,
                    0.008384794235229491,
                    0.00842271327972412,
                    0.009561113357543945,
                    0.007979514122009278,
                    0.007920794010162354,
                    0.007998554229736329,
                    0.008153914451599121,
                    0.007932474136352539,
                    0.008121753692626953,
                    0.008017913818359375,
                    0.007996473789215088,
                    0.008143513679504395,
                    0.00804383373260498,
                    0.008879034042358399,
                    0.00864127254486084,
                    0.007984794139862061,
                    0.008111353874206542,
                    0.007988473892211915,
                    0.008021273612976075,
                    0.008049593925476075,
                    0.00803183364868164,
                    0.00824239444732666,
                    0.007877433776855469,
                    0.007966713905334473,
                    0.00804447364807129,
                    0.008034394264221192,
                    0.00809791374206543,
                    0.008105753898620605,
                    0.008656953811645508,
                    0.008656952857971191,
                    0.008279033660888672,
                    0.00858575439453125,
                    0.008211354255676269,
                    0.00828991413116455,
                    0.008371193885803223,
                    0.008488313674926758,
                    0.008312953948974609,
                    0.008002074241638183,
                    0.008064474105834961,
                    0.00795199489593506,
                    0.008123193740844727,
                    0.008049114227294922,
                    0.008050073623657226,
                    0.007849915027618408,
                    0.007949433803558349,
                    0.008021273612976075,
                    0.008124154090881347,
                    0.007998713970184326,
                    0.007907193183898925,
                    0.008032475471496582
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 121.61989676365035
            },
            "energy": null,
            "efficiency": null
        },
        "per_token": {
            "memory": null,
            "latency": {
                "unit": "s",
                "count": 121,
                "total": 0.9941551127433775,
                "mean": 0.008216157956556839,
                "stdev": 0.0009260189860430026,
                "p50": 0.00864271354675293,
                "p90": 0.009282233238220215,
                "p95": 0.009432792663574219,
                "p99": 0.009684248733520508,
                "values": [
                    0.008524953842163086,
                    0.009432792663574219,
                    0.00779711389541626,
                    0.009491673469543457,
                    0.007664315223693848,
                    0.009387673377990723,
                    0.007458714008331299,
                    0.009282233238220215,
                    0.0074294347763061526,
                    0.009133112907409669,
                    0.0073251152038574215,
                    0.009040312767028809,
                    0.007430874824523926,
                    0.009253273010253906,
                    0.007371514797210693,
                    0.00913343334197998,
                    0.007377274990081787,
                    0.009232792854309082,
                    0.007568953990936279,
                    0.009482552528381347,
                    0.007514874935150146,
                    0.009625753402709962,
                    0.00747279405593872,
                    0.009404474258422852,
                    0.007543993949890137,
                    0.00950191307067871,
                    0.00864271354675293,
                    0.009399832725524903,
                    0.007136155128479004,
                    0.008778553009033203,
                    0.007217595100402832,
                    0.008711994171142578,
                    0.0071545538902282714,
                    0.008767833709716797,
                    0.007248634815216064,
                    0.00885455322265625,
                    0.007238234996795654,
                    0.008694232940673828,
                    0.007183034896850586,
                    0.008823513984680176,
                    0.007190874099731445,
                    0.008751833915710449,
                    0.007220634937286377,
                    0.008708792686462402,
                    0.007227355003356933,
                    0.008853754043579102,
                    0.007196794033050537,
                    0.012099350929260254,
                    0.00807359504699707,
                    0.009238713264465332,
                    0.007681593894958496,
                    0.008982873916625977,
                    0.007203673839569092,
                    0.008721274375915527,
                    0.007255034923553467,
                    0.00878047275543213,
                    0.007199834823608398,
                    0.00873535442352295,
                    0.007188154220581055,
                    0.008773754119873047,
                    0.007158074855804443,
                    0.008844633102416993,
                    0.007160154819488526,
                    0.008803993225097657,
                    0.007311194896697998,
                    0.008848633766174317,
                    0.007202394008636474,
                    0.00867695426940918,
                    0.00713807487487793,
                    0.008790713310241699,
                    0.007194554805755616,
                    0.00881839370727539,
                    0.007162074089050293,
                    0.008798873901367188,
                    0.0072307147979736325,
                    0.008804153442382813,
                    0.007289275169372559,
                    0.00877535343170166,
                    0.007880155086517335,
                    0.009698872566223144,
                    0.007954234123229981,
                    0.00906031322479248,
                    0.007500794887542725,
                    0.009141753196716309,
                    0.007809753894805908,
                    0.009249913215637206,
                    0.007478555202484131,
                    0.009177912712097168,
                    0.007493434906005859,
                    0.009399673461914063,
                    0.007604313850402832,
                    0.00927039337158203,
                    0.007648475170135498,
                    0.009365273475646972,
                    0.0075164752006530765,
                    0.00891679286956787,
                    0.0071899151802062985,
                    0.008871672630310058,
                    0.007283995151519776,
                    0.00885903263092041,
                    0.007206234931945801,
                    0.008765274047851563,
                    0.007226553916931153,
                    0.008858234405517577,
                    0.007190074920654297,
                    0.008829113006591796,
                    0.007142075061798096,
                    0.008868952751159669,
                    0.007165595054626465,
                    0.008723194122314454,
                    0.007153435230255127,
                    0.008774712562561035,
                    0.0071452751159667965,
                    0.008822234153747558,
                    0.007198554039001465,
                    0.008839994430541993,
                    0.007146554946899414,
                    0.008798233032226562,
                    0.007187674045562744,
                    0.008740472793579101,
                    0.007175674915313721
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 121.71138934858939
            },
            "energy": null,
            "efficiency": null
        }
    }
}
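
The "config" section above maps onto optimum-benchmark's Python API. Below is a minimal sketch of how an equivalent run could be configured and launched. It is not the exact script that produced this file; class names (Benchmark, BenchmarkConfig, PyTorchConfig, InferenceConfig, ProcessConfig, setup_logging) follow the optimum-benchmark 0.3.0 README, and keyword arguments may differ between versions.

```python
# Hedged sketch: reproduce a benchmark equivalent to the "config" section above
# with the optimum-benchmark Python API (class names per the 0.3.0 README).
from optimum_benchmark import (
    Benchmark,
    BenchmarkConfig,
    InferenceConfig,
    ProcessConfig,
    PyTorchConfig,
)
from optimum_benchmark.logging_utils import setup_logging

setup_logging(level="INFO")

if __name__ == "__main__":
    # Launcher: isolated spawned process, as in the "launcher" section.
    launcher_config = ProcessConfig(
        device_isolation=True,
        device_isolation_action="error",
    )
    # Scenario: inference with latency and memory tracking, as in the "scenario" section.
    scenario_config = InferenceConfig(latency=True, memory=True)
    # Backend: PyTorch on CUDA device 0, using randomly initialized weights ("no_weights").
    backend_config = PyTorchConfig(
        model="openai-community/gpt2",
        device="cuda",
        device_ids="0",
        no_weights=True,
    )
    benchmark_config = BenchmarkConfig(
        name="cuda_inference_transformers_text-generation_openai-community/gpt2",
        launcher=launcher_config,
        scenario=scenario_config,
        backend=backend_config,
    )
    # Launch the benchmark and print the report (prefill/decode/per_token targets,
    # as in the "report" section above).
    benchmark_report = Benchmark.launch(benchmark_config)
    benchmark_report.log()
```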