IlyasMoutawwakil: Upload cuda_inference_transformers_text-generation_openai-community/gpt2/benchmark.json with huggingface_hub (verified commit 4fe6ff9)
{
    "config": {
        "name": "cuda_inference_transformers_text-generation_openai-community/gpt2",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-generation",
            "library": "transformers",
            "model": "openai-community/gpt2",
            "processor": "openai-community/gpt2",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.236096,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 68702699520,
            "optimum_benchmark_version": "0.3.1",
            "optimum_benchmark_commit": "2c8ab57de1af767ec2e6a2cf774f52cea6a0db26",
            "transformers_version": "4.42.3",
            "transformers_commit": null,
            "accelerate_version": "0.31.0",
            "accelerate_commit": null,
            "diffusers_version": "0.29.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.7",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "prefill": {
            "memory": {
                "unit": "MB",
                "max_ram": 1048.825856,
                "max_global_vram": 1107.5584,
                "max_process_vram": 269454.671872,
                "max_reserved": 725.614592,
                "max_allocated": 513.035776
            },
            "latency": {
                "unit": "s",
                "count": 58,
                "total": 0.5212359266281126,
                "mean": 0.008986826321174358,
                "stdev": 0.00035361779917854184,
                "p50": 0.008914514064788819,
                "p90": 0.009299380207061768,
                "p95": 0.009475652933120728,
                "p99": 0.010059846420288086,
                "values": [
                    0.010749321937561035,
                    0.009256356239318848,
                    0.00926723575592041,
                    0.00918627643585205,
                    0.009094915390014648,
                    0.009002115249633789,
                    0.009020034790039062,
                    0.008926753997802734,
                    0.008981474876403809,
                    0.009170434951782226,
                    0.009497957229614258,
                    0.00947171688079834,
                    0.009284195899963378,
                    0.009223875045776367,
                    0.009194114685058593,
                    0.009299715995788575,
                    0.009286115646362305,
                    0.009373955726623535,
                    0.009164995193481445,
                    0.009179876327514649,
                    0.009083874702453613,
                    0.00917075538635254,
                    0.009299236297607422,
                    0.009189154624938965,
                    0.009539715766906738,
                    0.009091714859008788,
                    0.00900147533416748,
                    0.009183714866638184,
                    0.00874371337890625,
                    0.008902274131774902,
                    0.00869379425048828,
                    0.00865267276763916,
                    0.00874691390991211,
                    0.008645313262939453,
                    0.008646753311157227,
                    0.008704672813415528,
                    0.008678433418273925,
                    0.008898114204406738,
                    0.008773154258728027,
                    0.008724353790283202,
                    0.008727072715759277,
                    0.008725954055786132,
                    0.009204195022583008,
                    0.008704833030700684,
                    0.008748514175415039,
                    0.008850914001464844,
                    0.008822914123535155,
                    0.008710594177246093,
                    0.008688673973083497,
                    0.008660193443298339,
                    0.008685632705688477,
                    0.008634913444519042,
                    0.00868211269378662,
                    0.008656352996826171,
                    0.008690113067626954,
                    0.008631393432617188,
                    0.008692672729492187,
                    0.008717633247375489
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 222.5479750607497
            },
            "energy": null,
            "efficiency": null
        },
        "decode": {
            "memory": {
                "unit": "MB",
                "max_ram": 1049.096192,
                "max_global_vram": 1107.554304,
                "max_process_vram": 268485.398528,
                "max_reserved": 725.614592,
                "max_allocated": 513.25952
            },
            "latency": {
                "unit": "s",
                "count": 58,
                "total": 0.4918804502487183,
                "mean": 0.00848069741808135,
                "stdev": 0.00038451940138313815,
                "p50": 0.00836779260635376,
                "p90": 0.00884758586883545,
                "p95": 0.008938818216323852,
                "p99": 0.009986144456863404,
                "values": [
                    0.008895234107971191,
                    0.008845953941345215,
                    0.008664993286132813,
                    0.008700833320617676,
                    0.008466113090515136,
                    0.008433152198791503,
                    0.008420831680297851,
                    0.008382111549377442,
                    0.008064351081848144,
                    0.008687233924865723,
                    0.009185794830322266,
                    0.008832674026489257,
                    0.008534111976623536,
                    0.008677152633666993,
                    0.008612353324890137,
                    0.008825473785400391,
                    0.008580992698669434,
                    0.00887763500213623,
                    0.00859315299987793,
                    0.00854099178314209,
                    0.008563233375549316,
                    0.008542432785034179,
                    0.008514593124389649,
                    0.008851393699645996,
                    0.00869619369506836,
                    0.00864835262298584,
                    0.009765157699584961,
                    0.008187071800231933,
                    0.008371232986450195,
                    0.008302432060241699,
                    0.008342432022094727,
                    0.008212991714477539,
                    0.008301152229309081,
                    0.008163870811462403,
                    0.008223871231079101,
                    0.00828115177154541,
                    0.008409152984619141,
                    0.008304512023925781,
                    0.008364352226257324,
                    0.008240351676940918,
                    0.008234751701354981,
                    0.010279080390930177,
                    0.008228192329406738,
                    0.008236991882324218,
                    0.008182110786437989,
                    0.008200350761413575,
                    0.008225472450256347,
                    0.008177150726318359,
                    0.008237791061401368,
                    0.008180351257324218,
                    0.008208352088928223,
                    0.00824675178527832,
                    0.008177472114562987,
                    0.008150430679321289,
                    0.008205631256103515,
                    0.008179230690002441,
                    0.00821555233001709,
                    0.008207712173461914
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 117.91483066804632
            },
            "energy": null,
            "efficiency": null
        },
        "per_token": {
            "memory": null,
            "latency": {
                "unit": "s",
                "count": 115,
                "total": 1.0043158655166624,
                "mean": 0.008733181439275328,
                "stdev": 0.0008817359958030435,
                "p50": 0.009159235000610352,
                "p90": 0.009855622100830078,
                "p95": 0.009947125816345215,
                "p99": 0.010215323543548583,
                "values": [
                    0.00835923194885254,
                    0.009836997985839844,
                    0.008233951568603516,
                    0.009928037643432617,
                    0.008073150634765625,
                    0.009827717781066895,
                    0.00810163116455078,
                    0.009741316795349121,
                    0.007885790824890137,
                    0.009628037452697753,
                    0.007833789825439454,
                    0.00966435718536377,
                    0.007851069927215576,
                    0.009541637420654297,
                    0.007838590145111084,
                    0.009568997383117676,
                    0.007669630050659179,
                    0.009604355812072755,
                    0.00804211139678955,
                    0.010190279960632324,
                    0.008489472389221191,
                    0.010219400405883788,
                    0.008192030906677247,
                    0.009972518920898437,
                    0.007969629764556885,
                    0.009835718154907226,
                    0.00807555103302002,
                    0.009847237586975097,
                    0.00801171112060547,
                    0.009947237968444824,
                    0.008213472366333009,
                    0.009947077751159668,
                    0.007981310844421387,
                    0.010020998001098633,
                    0.008246111869812011,
                    0.009849958419799805,
                    0.00798003101348877,
                    0.009848837852478027,
                    0.007968989849090577,
                    0.00970179843902588,
                    0.00796435022354126,
                    0.009824197769165039,
                    0.007960031032562256,
                    0.0099301176071167,
                    0.007895709991455079,
                    0.009859397888183593,
                    0.008190752029418946,
                    0.01025075912475586,
                    0.00809299087524414,
                    0.00974115753173828,
                    0.008100510597229004,
                    0.00959219741821289,
                    0.009098114967346192,
                    0.009897478103637695,
                    0.007682750225067139,
                    0.00928803539276123,
                    0.007775869846343994,
                    0.009540677070617676,
                    0.007766749858856201,
                    0.00927475643157959,
                    0.00776642894744873,
                    0.00926771640777588,
                    0.007681789875030517,
                    0.009313956260681152,
                    0.007763548851013184,
                    0.009217475891113281,
                    0.007653149127960205,
                    0.00920163631439209,
                    0.007693308830261231,
                    0.00927043628692627,
                    0.007757150173187256,
                    0.009240835189819337,
                    0.007834431171417236,
                    0.009514595985412598,
                    0.007778910160064698,
                    0.009337315559387207,
                    0.00783715009689331,
                    0.009292196273803711,
                    0.007715229988098145,
                    0.009288675308227539,
                    0.007736350059509277,
                    0.009271395683288575,
                    0.009598596572875977,
                    0.009933958053588867,
                    0.007712349891662597,
                    0.009257954597473144,
                    0.007708189964294433,
                    0.009313796043395996,
                    0.0076803498268127445,
                    0.009390275955200195,
                    0.007706429004669189,
                    0.009365796089172363,
                    0.0076971502304077144,
                    0.00927923583984375,
                    0.007652829170227051,
                    0.009248516082763673,
                    0.007730429172515869,
                    0.00920499610900879,
                    0.007683228969573974,
                    0.009217475891113281,
                    0.007717949867248535,
                    0.009159235000610352,
                    0.0077513899803161625,
                    0.009213154792785644,
                    0.0076822700500488285,
                    0.009188835144042968,
                    0.007655230045318604,
                    0.00921987533569336,
                    0.007701629161834716,
                    0.009169634819030761,
                    0.007659869194030762,
                    0.009244996070861817,
                    0.007697949886322022,
                    0.009272834777832032,
                    0.007699389934539795
                ]
            },
            "throughput": {
                "unit": "tokens/s",
                "value": 114.50580833036938
            },
            "energy": null,
            "efficiency": null
        }
    }
}
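
Because this is a raw optimum-benchmark JSON report, each section under "report" (prefill, decode, per_token) carries the same "latency" and "throughput" sub-objects, so the headline numbers can be pulled out with standard-library Python. Below is a minimal sketch, assuming the report above has been downloaded locally as benchmark.json (the filename and path are assumptions, not part of the report):

import json

# Minimal sketch (not part of the report): summarize each section of the report.
# Assumes the JSON above was saved locally as "benchmark.json" (hypothetical path).
with open("benchmark.json") as f:
    benchmark = json.load(f)

print("model:", benchmark["config"]["backend"]["model"])

for section, results in benchmark["report"].items():
    latency = results["latency"]        # per-section latency statistics, in seconds
    throughput = results["throughput"]  # per-section throughput, in tokens/s
    print(
        f"{section}: mean latency = {latency['mean'] * 1e3:.3f} ms, "
        f"p90 = {latency['p90'] * 1e3:.3f} ms, "
        f"throughput = {throughput['value']:.1f} {throughput['unit']}"
    )

For this run, the script would report a mean prefill latency of roughly 9.0 ms and a decode throughput of roughly 117.9 tokens/s, matching the aggregated values in the report.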