{
  "config": {
    "name": "pytorch_generate",
    "backend": {
      "name": "pytorch",
      "version": "2.3.0+cu121",
      "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
      "task": "text-generation",
      "library": "transformers",
      "model": "google/gemma-2b",
      "processor": "google/gemma-2b",
      "device": "cuda",
      "device_ids": "0",
      "seed": 42,
      "inter_op_num_threads": null,
      "intra_op_num_threads": null,
      "model_kwargs": {},
      "processor_kwargs": {},
      "hub_kwargs": {},
      "no_weights": true,
      "device_map": null,
      "torch_dtype": "float16",
      "eval_mode": true,
      "to_bettertransformer": false,
      "low_cpu_mem_usage": null,
      "attn_implementation": null,
      "cache_implementation": null,
      "autocast_enabled": false,
      "autocast_dtype": null,
      "torch_compile": false,
      "torch_compile_target": "forward",
      "torch_compile_config": {
        "backend": "inductor",
        "mode": "reduce-overhead",
        "fullgraph": true
      },
      "quantization_scheme": null,
      "quantization_config": {},
      "deepspeed_inference": false,
      "deepspeed_inference_config": {},
      "peft_type": null,
      "peft_config": {}
    },
"scenario": { |
|
"name": "inference", |
|
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario", |
|
"iterations": 2, |
|
"duration": 0, |
|
"warmup_runs": 10, |
|
"input_shapes": { |
|
"batch_size": 1, |
|
"num_choices": 2, |
|
"sequence_length": 7 |
|
}, |
|
"new_tokens": null, |
|
"latency": true, |
|
"memory": true, |
|
"energy": false, |
|
"forward_kwargs": {}, |
|
"generate_kwargs": { |
|
"max_new_tokens": 128, |
|
"min_new_tokens": 128, |
|
"do_sample": false |
|
}, |
|
"call_kwargs": {} |
|
}, |
|
"launcher": { |
|
"name": "process", |
|
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher", |
|
"device_isolation": true, |
|
"device_isolation_action": "warn", |
|
"start_method": "spawn" |
|
}, |
|
"environment": { |
|
"cpu": " AMD EPYC 7R32", |
|
"cpu_count": 16, |
|
"cpu_ram_mb": 66697.29792, |
|
"system": "Linux", |
|
"machine": "x86_64", |
|
"platform": "Linux-5.10.216-204.855.amzn2.x86_64-x86_64-with-glibc2.29", |
|
"processor": "x86_64", |
|
"python_version": "3.8.10", |
|
"gpu": [ |
|
"NVIDIA A10G" |
|
], |
|
"gpu_count": 1, |
|
"gpu_vram_mb": 24146608128, |
|
"optimum_benchmark_version": "0.2.1", |
|
"optimum_benchmark_commit": null, |
|
"transformers_version": "4.42.0.dev0", |
|
"transformers_commit": "8d2efea701bb524efb9917bf10657f4fd8021e9a", |
|
"accelerate_version": "0.31.0.dev0", |
|
"accelerate_commit": null, |
|
"diffusers_version": null, |
|
"diffusers_commit": null, |
|
"optimum_version": "1.21.0.dev0", |
|
"optimum_commit": null, |
|
"timm_version": "0.9.16", |
|
"timm_commit": null, |
|
"peft_version": "0.11.2.dev0", |
|
"peft_commit": null |
|
} |
|
}, |
|
"report": { |
|
"prefill": { |
|
"memory": { |
|
"unit": "MB", |
|
"max_ram": 1714.19648, |
|
"max_global_vram": 6795.296768, |
|
"max_process_vram": 0.0, |
|
"max_reserved": 6148.849664, |
|
"max_allocated": 5033.552896 |
|
}, |
|
"latency": { |
|
"unit": "s", |
|
"count": 2, |
|
"total": 0.04570534324645996, |
|
"mean": 0.02285267162322998, |
|
"stdev": 0.003733311653137207, |
|
"p50": 0.02285267162322998, |
|
"p90": 0.025839320945739746, |
|
"p95": 0.026212652111053465, |
|
"p99": 0.026511317043304443, |
|
"values": [ |
|
0.026585983276367187, |
|
0.019119359970092772 |
|
] |
|
}, |
|
"throughput": { |
|
"unit": "tokens/s", |
|
"value": 306.3099192693263 |
|
}, |
|
"energy": null, |
|
"efficiency": null |
|
}, |
|
"decode": { |
|
"memory": { |
|
"unit": "MB", |
|
"max_ram": 1714.200576, |
|
"max_global_vram": 6795.296768, |
|
"max_process_vram": 0.0, |
|
"max_reserved": 6148.849664, |
|
"max_allocated": 5033.553408 |
|
}, |
|
"latency": { |
|
"unit": "s", |
|
"count": 2, |
|
"total": 4.715706298828125, |
|
"mean": 2.3578531494140624, |
|
"stdev": 0.001009887695312628, |
|
"p50": 2.3578531494140624, |
|
"p90": 2.3586610595703124, |
|
"p95": 2.3587620483398437, |
|
"p99": 2.358842839355469, |
|
"values": [ |
|
2.35684326171875, |
|
2.358863037109375 |
|
] |
|
}, |
|
"throughput": { |
|
"unit": "tokens/s", |
|
"value": 53.86255714507076 |
|
}, |
|
"energy": null, |
|
"efficiency": null |
|
}, |
|
"per_token": { |
|
"memory": null, |
|
"latency": { |
|
"unit": "s", |
|
"count": 253, |
|
"total": 4.714851266860956, |
|
"mean": 0.01863577575834372, |
|
"stdev": 0.0012413036969916396, |
|
"p50": 0.018550783157348632, |
|
"p90": 0.018630245971679688, |
|
"p95": 0.01893580780029297, |
|
"p99": 0.01997886192321777, |
|
"values": [ |
|
0.019871679306030274, |
|
0.0200949764251709, |
|
0.01862860870361328, |
|
0.018367488861083983, |
|
0.01838489532470703, |
|
0.018348031997680665, |
|
0.01836649513244629, |
|
0.018358240127563475, |
|
0.018363391876220703, |
|
0.01842790412902832, |
|
0.01841868782043457, |
|
0.018347007751464844, |
|
0.018358272552490236, |
|
0.01820364761352539, |
|
0.018223104476928712, |
|
0.018292736053466797, |
|
0.0183767032623291, |
|
0.018440191268920898, |
|
0.018386943817138672, |
|
0.018423807144165038, |
|
0.018536447525024414, |
|
0.018448383331298827, |
|
0.01841868782043457, |
|
0.01840025520324707, |
|
0.018408447265625, |
|
0.018288639068603514, |
|
0.01822822380065918, |
|
0.0182476806640625, |
|
0.018318336486816408, |
|
0.01843916893005371, |
|
0.01840742492675781, |
|
0.01840332794189453, |
|
0.01844121551513672, |
|
0.01839923286437988, |
|
0.018447359085083007, |
|
0.01842790412902832, |
|
0.018362367630004883, |
|
0.01844121551513672, |
|
0.018408447265625, |
|
0.01839820861816406, |
|
0.018255872726440428, |
|
0.01819443130493164, |
|
0.018242559432983398, |
|
0.018206720352172853, |
|
0.018249727249145507, |
|
0.018353151321411132, |
|
0.018258943557739257, |
|
0.018160640716552736, |
|
0.01826304054260254, |
|
0.018176000595092775, |
|
0.018335744857788085, |
|
0.018506752014160157, |
|
0.02190540885925293, |
|
0.019738624572753907, |
|
0.01860710334777832, |
|
0.01863577651977539, |
|
0.018694143295288086, |
|
0.0186060791015625, |
|
0.018332672119140626, |
|
0.01840025520324707, |
|
0.018550783157348632, |
|
0.018572288513183592, |
|
0.01858355140686035, |
|
0.018589696884155273, |
|
0.01861631965637207, |
|
0.018594816207885743, |
|
0.018546688079833985, |
|
0.01862451171875, |
|
0.018646015167236327, |
|
0.018589696884155273, |
|
0.018618368148803712, |
|
0.018559040069580077, |
|
0.018597824096679687, |
|
0.018532352447509767, |
|
0.018589696884155273, |
|
0.018587648391723634, |
|
0.018619392395019533, |
|
0.01862041664123535, |
|
0.018550783157348632, |
|
0.01859891128540039, |
|
0.018563072204589845, |
|
0.018557952880859374, |
|
0.01862144088745117, |
|
0.018535423278808593, |
|
0.018569215774536133, |
|
0.018911231994628908, |
|
0.018527231216430663, |
|
0.01860403251647949, |
|
0.018553855895996094, |
|
0.018528255462646484, |
|
0.018503679275512695, |
|
0.018579456329345705, |
|
0.018546688079833985, |
|
0.01860710334777832, |
|
0.018561023712158203, |
|
0.018565120697021483, |
|
0.018611200332641603, |
|
0.01863167953491211, |
|
0.018601984024047852, |
|
0.018534400939941405, |
|
0.018527296066284178, |
|
0.01855686378479004, |
|
0.018551807403564453, |
|
0.018539520263671876, |
|
0.018585599899291993, |
|
0.018566144943237304, |
|
0.018565120697021483, |
|
0.018559999465942383, |
|
0.018587648391723634, |
|
0.018601984024047852, |
|
0.01857535934448242, |
|
0.018584575653076172, |
|
0.018536447525024414, |
|
0.018592767715454102, |
|
0.01858355140686035, |
|
0.018546688079833985, |
|
0.018601984024047852, |
|
0.01862041664123535, |
|
0.018516992568969725, |
|
0.018544639587402344, |
|
0.018520063400268554, |
|
0.018561023712158203, |
|
0.018544639587402344, |
|
0.018570240020751954, |
|
0.018563072204589845, |
|
0.018559999465942383, |
|
0.03768115234375, |
|
0.01840127944946289, |
|
0.018375680923461913, |
|
0.01839308738708496, |
|
0.01840947151184082, |
|
0.018455551147460936, |
|
0.018404352188110353, |
|
0.018420736312866212, |
|
0.0184268798828125, |
|
0.018507776260375978, |
|
0.018877439498901367, |
|
0.01867580795288086, |
|
0.018562976837158202, |
|
0.01838489532470703, |
|
0.01823744010925293, |
|
0.018191360473632814, |
|
0.018192384719848635, |
|
0.01825279998779297, |
|
0.01839206314086914, |
|
0.01846784019470215, |
|
0.01843609619140625, |
|
0.018413568496704103, |
|
0.01843609619140625, |
|
0.018469888687133788, |
|
0.018367488861083983, |
|
0.01846886444091797, |
|
0.01848320007324219, |
|
0.018456575393676757, |
|
0.018488319396972656, |
|
0.018487295150756835, |
|
0.018491392135620118, |
|
0.01845964813232422, |
|
0.0184770565032959, |
|
0.018528352737426756, |
|
0.01862544059753418, |
|
0.019636224746704102, |
|
0.019417087554931642, |
|
0.019311616897583008, |
|
0.019309568405151366, |
|
0.018490367889404297, |
|
0.01849241638183594, |
|
0.01845452880859375, |
|
0.018431999206542968, |
|
0.01844428825378418, |
|
0.01844534492492676, |
|
0.018448352813720703, |
|
0.018509824752807616, |
|
0.01836851119995117, |
|
0.01841663932800293, |
|
0.018397184371948243, |
|
0.01840025520324707, |
|
0.018437120437622072, |
|
0.018415615081787108, |
|
0.018435136795043945, |
|
0.018466751098632814, |
|
0.01841663932800293, |
|
0.018453504562377928, |
|
0.01863065528869629, |
|
0.01861222457885742, |
|
0.018572288513183592, |
|
0.018581504821777343, |
|
0.018584575653076172, |
|
0.018601984024047852, |
|
0.018594816207885743, |
|
0.018540544509887694, |
|
0.018601984024047852, |
|
0.01859584045410156, |
|
0.018571264266967775, |
|
0.018579456329345705, |
|
0.018580480575561522, |
|
0.018597888946533202, |
|
0.018603008270263673, |
|
0.01859686470031738, |
|
0.018571264266967775, |
|
0.018597888946533202, |
|
0.018553855895996094, |
|
0.01858252716064453, |
|
0.01859891128540039, |
|
0.01861222457885742, |
|
0.01862348747253418, |
|
0.01857535934448242, |
|
0.018587648391723634, |
|
0.01859891128540039, |
|
0.018601984024047852, |
|
0.018579456329345705, |
|
0.01864908790588379, |
|
0.018593791961669923, |
|
0.01860710334777832, |
|
0.018675712585449217, |
|
0.018618368148803712, |
|
0.01864192008972168, |
|
0.018589696884155273, |
|
0.01857535934448242, |
|
0.018578432083129884, |
|
0.018553855895996094, |
|
0.018540544509887694, |
|
0.018542591094970702, |
|
0.018543615341186523, |
|
0.01861427116394043, |
|
0.018523136138916017, |
|
0.01856716728210449, |
|
0.018525184631347655, |
|
0.018549791336059572, |
|
0.018526176452636718, |
|
0.018552831649780274, |
|
0.01857535934448242, |
|
0.018556928634643553, |
|
0.018566144943237304, |
|
0.01860095977783203, |
|
0.018972671508789063, |
|
0.018766847610473633, |
|
0.018597888946533202, |
|
0.01859891128540039, |
|
0.018550783157348632, |
|
0.018590751647949218, |
|
0.018606048583984375, |
|
0.018557952880859374, |
|
0.018546688079833985, |
|
0.018566144943237304, |
|
0.018593791961669923, |
|
0.019168256759643554, |
|
0.019219455718994142, |
|
0.019418111801147463, |
|
0.01863372802734375, |
|
0.018603008270263673, |
|
0.018603008270263673, |
|
0.01861631965637207 |
|
] |
|
}, |
|
"throughput": { |
|
"unit": "tokens/s", |
|
"value": 53.6602292798181 |
|
}, |
|
"energy": null, |
|
"efficiency": null |
|
} |
|
} |
|
} |