{
"config": {
"name": "pytorch_generate",
"backend": {
"name": "pytorch",
"version": "2.3.0+cu121",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "text-generation",
"library": "transformers",
"model": "google/gemma-2b",
"processor": "google/gemma-2b",
"device": "cuda",
"device_ids": "0",
"seed": 42,
"inter_op_num_threads": null,
"intra_op_num_threads": null,
"model_kwargs": {},
"processor_kwargs": {},
"hub_kwargs": {},
"no_weights": true,
"device_map": null,
"torch_dtype": "float16",
"eval_mode": true,
"to_bettertransformer": false,
"low_cpu_mem_usage": null,
"attn_implementation": null,
"cache_implementation": "static",
"autocast_enabled": false,
"autocast_dtype": null,
"torch_compile": false,
"torch_compile_target": "forward",
"torch_compile_config": {
"backend": "inductor",
"mode": "reduce-overhead",
"fullgraph": true
},
"quantization_scheme": null,
"quantization_config": {},
"deepspeed_inference": false,
"deepspeed_inference_config": {},
"peft_type": null,
"peft_config": {}
},
"scenario": {
"name": "inference",
"_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
"iterations": 2,
"duration": 0,
"warmup_runs": 10,
"input_shapes": {
"batch_size": 1,
"num_choices": 2,
"sequence_length": 7
},
"new_tokens": null,
"latency": true,
"memory": true,
"energy": false,
"forward_kwargs": {},
"generate_kwargs": {
"max_new_tokens": 128,
"min_new_tokens": 128,
"do_sample": false
},
"call_kwargs": {}
},
"launcher": {
"name": "process",
"_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
"device_isolation": true,
"device_isolation_action": "warn",
"start_method": "spawn"
},
"environment": {
"cpu": " AMD EPYC 7R32",
"cpu_count": 16,
"cpu_ram_mb": 66697.29792,
"system": "Linux",
"machine": "x86_64",
"platform": "Linux-5.10.217-205.860.amzn2.x86_64-x86_64-with-glibc2.29",
"processor": "x86_64",
"python_version": "3.8.10",
"gpu": [
"NVIDIA A10G"
],
"gpu_count": 1,
"gpu_vram_mb": 24146608128,
"optimum_benchmark_version": "0.2.1",
"optimum_benchmark_commit": null,
"transformers_version": "4.42.0.dev0",
"transformers_commit": "bdf36dcd48106a4a0278ed7f3cc26cd65ab7b066",
"accelerate_version": "0.31.0.dev0",
"accelerate_commit": null,
"diffusers_version": null,
"diffusers_commit": null,
"optimum_version": "1.21.0.dev0",
"optimum_commit": null,
"timm_version": "0.9.16",
"timm_commit": null,
"peft_version": "0.11.2.dev0",
"peft_commit": null
}
},
"report": {
"prefill": {
"memory": {
"unit": "MB",
"max_ram": 1757.618176,
"max_global_vram": 6793.199616,
"max_process_vram": 0.0,
"max_reserved": 6146.752512,
"max_allocated": 5036.936192
},
"latency": {
"unit": "s",
"count": 2,
"total": 0.0454350414276123,
"mean": 0.02271752071380615,
"stdev": 0.00012868785858154326,
"p50": 0.02271752071380615,
"p90": 0.02282047100067139,
"p95": 0.02283333978652954,
"p99": 0.022843634815216066,
"values": [
0.02258883285522461,
0.022846208572387696
]
},
"throughput": {
"unit": "tokens/s",
"value": 308.1322160188845
},
"energy": null,
"efficiency": null
},
"decode": {
"memory": {
"unit": "MB",
"max_ram": 1757.63456,
"max_global_vram": 6793.199616,
"max_process_vram": 0.0,
"max_reserved": 6146.752512,
"max_allocated": 5036.936704
},
"latency": {
"unit": "s",
"count": 2,
"total": 5.265699951171875,
"mean": 2.6328499755859376,
"stdev": 0.01761730957031249,
"p50": 2.6328499755859376,
"p90": 2.6469438232421876,
"p95": 2.6487055541992186,
"p99": 2.650114938964844,
"values": [
2.65046728515625,
2.615232666015625
]
},
"throughput": {
"unit": "tokens/s",
"value": 48.23670212038432
},
"energy": null,
"efficiency": null
},
"per_token": {
"memory": null,
"latency": {
"unit": "s",
"count": 253,
"total": 5.267439617156982,
"mean": 0.02081991943540309,
"stdev": 0.00151601123328956,
"p50": 0.020757503509521484,
"p90": 0.021109760284423826,
"p95": 0.0213805061340332,
"p99": 0.022162309799194337,
"values": [
0.02224742317199707,
0.022152191162109376,
0.021110815048217775,
0.021014495849609374,
0.021028863906860353,
0.020974592208862306,
0.0212490234375,
0.0208035831451416,
0.02090291213989258,
0.021222400665283202,
0.021135360717773437,
0.02081279945373535,
0.02186444854736328,
0.02187468719482422,
0.022158336639404298,
0.021526527404785157,
0.020960256576538085,
0.02102783966064453,
0.02108415985107422,
0.020773887634277344,
0.020750335693359375,
0.020810752868652343,
0.020932607650756836,
0.020792320251464845,
0.02088243293762207,
0.02123161506652832,
0.02123980712890625,
0.0211015682220459,
0.0210513916015625,
0.020964351654052735,
0.020794368743896483,
0.020965375900268556,
0.021109760284423826,
0.021106687545776368,
0.02082611274719238,
0.020907007217407226,
0.02083328056335449,
0.020813823699951172,
0.020757503509521484,
0.020742143630981445,
0.02082918357849121,
0.020578304290771485,
0.020818944931030273,
0.0208721923828125,
0.020755456924438476,
0.02065510368347168,
0.020398080825805662,
0.020354047775268554,
0.02046771240234375,
0.020151296615600587,
0.020343807220458983,
0.02028646469116211,
0.02062233543395996,
0.020505599975585938,
0.020379648208618165,
0.021276735305786134,
0.02080761528015137,
0.02205900764465332,
0.02039091110229492,
0.020363264083862305,
0.020769792556762694,
0.021003263473510742,
0.022165504455566407,
0.021411840438842773,
0.02123673629760742,
0.02106470489501953,
0.020728832244873048,
0.02090598487854004,
0.021086208343505858,
0.02106879997253418,
0.02101043128967285,
0.02102579116821289,
0.020958208084106447,
0.020958208084106447,
0.020977664947509765,
0.020887615203857422,
0.020373439788818358,
0.020366336822509764,
0.020361215591430663,
0.020371456146240235,
0.020371488571166992,
0.020403167724609376,
0.020455423355102538,
0.02033667182922363,
0.020337631225585937,
0.020295679092407228,
0.020321279525756835,
0.020356096267700196,
0.020348928451538087,
0.020337663650512695,
0.020388864517211915,
0.020361215591430663,
0.020360191345214843,
0.020393983840942383,
0.020307968139648438,
0.020351999282836913,
0.020711423873901368,
0.02082508850097656,
0.02087116813659668,
0.02083430480957031,
0.020982784271240236,
0.021003263473510742,
0.021003263473510742,
0.021013504028320314,
0.021053440093994142,
0.020978687286376953,
0.021001216888427734,
0.0210565128326416,
0.020769792556762694,
0.0210513916015625,
0.02101862335205078,
0.02104115104675293,
0.020989952087402345,
0.021028863906860353,
0.021004287719726563,
0.020932607650756836,
0.020982784271240236,
0.02060492706298828,
0.020985855102539062,
0.02099404716491699,
0.020951135635375977,
0.020848543167114257,
0.02089369583129883,
0.020366336822509764,
0.02079641532897949,
0.020816896438598635,
0.04401971054077149,
0.02084659194946289,
0.020912128448486327,
0.02101862335205078,
0.020936704635620116,
0.020898815155029296,
0.020934656143188478,
0.02105036735534668,
0.020953088760375976,
0.020993024826049804,
0.020657215118408203,
0.020946880340576172,
0.02090598487854004,
0.020891647338867187,
0.02087833595275879,
0.020739072799682616,
0.020327423095703127,
0.021061632156372072,
0.020931583404541015,
0.020322303771972656,
0.020592639923095703,
0.020702207565307617,
0.021176319122314453,
0.02105548858642578,
0.02063052749633789,
0.020933631896972657,
0.020993024826049804,
0.020961280822753905,
0.02089574432373047,
0.021178367614746094,
0.02215936088562012,
0.02124185562133789,
0.020974592208862306,
0.020937728881835937,
0.0209039363861084,
0.02084556770324707,
0.020348928451538087,
0.020364288330078126,
0.020319232940673827,
0.020343807220458983,
0.020327423095703127,
0.020344831466674804,
0.020319232940673827,
0.020374528884887694,
0.020395008087158203,
0.020363264083862305,
0.020331520080566406,
0.020341760635375978,
0.020342784881591795,
0.020373504638671876,
0.020322303771972656,
0.02039091110229492,
0.020371456146240235,
0.02040934371948242,
0.020365312576293947,
0.020362239837646484,
0.020362239837646484,
0.020337663650512695,
0.020319232940673827,
0.020353023529052734,
0.020335615158081053,
0.020350976943969725,
0.020364288330078126,
0.020331520080566406,
0.020288511276245116,
0.020350976943969725,
0.02040934371948242,
0.020398080825805662,
0.020374528884887694,
0.020333568572998048,
0.02043801689147949,
0.02040115165710449,
0.020340736389160157,
0.020365312576293947,
0.020350976943969725,
0.020396032333374024,
0.020966400146484376,
0.020337663650512695,
0.020388864517211915,
0.02083328056335449,
0.02089369583129883,
0.020406272888183592,
0.020477951049804686,
0.020619264602661135,
0.020312063217163084,
0.020377599716186523,
0.020304960250854494,
0.0203703670501709,
0.020312063217163084,
0.020398080825805662,
0.020351999282836913,
0.02043084716796875,
0.021365760803222656,
0.02063974380493164,
0.020363264083862305,
0.020343807220458983,
0.020335615158081053,
0.020298751831054687,
0.020324352264404297,
0.020373504638671876,
0.02032640075683594,
0.02035916709899902,
0.02028544044494629,
0.020350976943969725,
0.020298751831054687,
0.020331520080566406,
0.020311040878295897,
0.020357120513916017,
0.02049945640563965,
0.020477951049804686,
0.02164838409423828,
0.021109760284423826,
0.020926464080810548,
0.020962303161621093,
0.020943872451782225,
0.020427776336669923,
0.020428800582885744,
0.02038374328613281,
0.020330495834350586,
0.021402624130249022,
0.021046272277832033,
0.020758527755737305,
0.020347904205322266,
0.020341760635375978,
0.020321279525756835,
0.02031820869445801,
0.02027008056640625
]
},
"throughput": {
"unit": "tokens/s",
"value": 48.03092553276439
},
"energy": null,
"efficiency": null
}
}
}