{
    "config": {
        "name": "cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "multiple-choice",
            "library": "transformers",
            "model": "FacebookAI/roberta-base",
            "processor": "FacebookAI/roberta-base",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.236096,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 68702699520,
            "optimum_benchmark_version": "0.2.1",
            "optimum_benchmark_commit": "074fc31a7e13e70facbebbf8b363a0c312f75f69",
            "transformers_version": "4.42.3",
            "transformers_commit": null,
            "accelerate_version": "0.31.0",
            "accelerate_commit": null,
            "diffusers_version": "0.29.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.7",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1006.485504,
                "max_global_vram": 898.473984,
                "max_process_vram": 183203.61472,
                "max_reserved": 555.74528,
                "max_allocated": 499.507712
            },
            "latency": {
                "unit": "s",
                "count": 125,
                "total": 0.999640771865845,
                "mean": 0.007997126174926757,
                "stdev": 0.00080058813664221,
                "p50": 0.007822042942047119,
                "p90": 0.008252120018005371,
                "p95": 0.008352695083618165,
                "p99": 0.012736585006713885,
                "values": [
                    0.014267610549926759,
                    0.013548894882202149,
                    0.010164270401000977,
                    0.007834362030029298,
                    0.00782156276702881,
                    0.007853561878204346,
                    0.007829243183135987,
                    0.007822042942047119,
                    0.007823482990264892,
                    0.00781484317779541,
                    0.007838201999664307,
                    0.007819962024688721,
                    0.007830041885375977,
                    0.007824761867523193,
                    0.007823482036590576,
                    0.007844282150268554,
                    0.007847161769866943,
                    0.007787002086639404,
                    0.007855162143707276,
                    0.00784780216217041,
                    0.007838521957397461,
                    0.007835962772369386,
                    0.007823802947998048,
                    0.007796603202819824,
                    0.007842522144317627,
                    0.007823162078857422,
                    0.007832602024078369,
                    0.007862202167510986,
                    0.007868442058563232,
                    0.00788060188293457,
                    0.007831161975860595,
                    0.007811161994934082,
                    0.007806042194366455,
                    0.007764603137969971,
                    0.007783642768859863,
                    0.0077678017616271975,
                    0.007810041904449463,
                    0.007750843048095703,
                    0.007767003059387207,
                    0.007785562038421631,
                    0.007911642074584961,
                    0.0077814021110534665,
                    0.0077492427825927735,
                    0.007733723163604736,
                    0.007774841785430908,
                    0.007787322998046875,
                    0.007804603099822998,
                    0.0077911620140075685,
                    0.007781722068786621,
                    0.0077911620140075685,
                    0.007812121868133545,
                    0.00779420280456543,
                    0.007778683185577393,
                    0.007776762008666992,
                    0.0078124418258666995,
                    0.00782108211517334,
                    0.007795641899108887,
                    0.007811323165893554,
                    0.007824122905731201,
                    0.007784442901611328,
                    0.0078095622062683105,
                    0.007814682006835938,
                    0.007819161891937257,
                    0.007764122009277344,
                    0.007746683120727539,
                    0.007851481914520264,
                    0.007817401885986328,
                    0.007796761989593506,
                    0.007791801929473877,
                    0.007815802097320556,
                    0.00782044219970703,
                    0.007780282974243164,
                    0.007812122821807862,
                    0.007742843151092529,
                    0.007807002067565918,
                    0.00812428092956543,
                    0.008430679321289063,
                    0.008114199638366699,
                    0.008314680099487305,
                    0.008724437713623048,
                    0.008189559936523438,
                    0.008362198829650879,
                    0.00837228012084961,
                    0.008293080329895019,
                    0.008314680099487305,
                    0.008228919982910156,
                    0.008257559776306152,
                    0.00826908016204834,
                    0.0083018798828125,
                    0.008243960380554199,
                    0.008184439659118652,
                    0.007942522048950195,
                    0.007915482044219971,
                    0.00790412187576294,
                    0.007887482166290283,
                    0.007779003143310547,
                    0.008055960655212403,
                    0.0078095622062683105,
                    0.007804282188415527,
                    0.007795162200927734,
                    0.007807002067565918,
                    0.007801881790161133,
                    0.007819642066955566,
                    0.007801562786102295,
                    0.0079916410446167,
                    0.007836441993713378,
                    0.007839481830596923,
                    0.007833881855010986,
                    0.007789882183074951,
                    0.007771643161773681,
                    0.007792121887207031,
                    0.00785548210144043,
                    0.007904440879821777,
                    0.007811802864074707,
                    0.007806522846221924,
                    0.007815803050994873,
                    0.007811800956726074,
                    0.007789562225341797,
                    0.007830202102661133,
                    0.007823162078857422,
                    0.00782716178894043,
                    0.007831002235412598,
                    0.007835001945495605,
                    0.007850681781768799,
                    0.007855641841888427
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 125.04491965317261
            },
            "energy": null,
            "efficiency": null
        }
    }
}
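
The raw per-forward latencies in "values" make the summary statistics easy to sanity-check. Below is a minimal sketch in plain standard-library Python, assuming a local copy of this file saved as benchmark.json; it reloads the report and recomputes the mean latency and the samples/s throughput. Since batch_size is 1 in this run, throughput is simply count divided by total.

import json
import statistics

# Path is an assumption: a local copy of this report saved as benchmark.json.
with open("benchmark.json") as f:
    benchmark = json.load(f)

latency = benchmark["report"]["forward"]["latency"]
values = latency["values"]

# Recompute the headline numbers from the raw per-forward latencies (seconds).
mean_s = sum(values) / len(values)
stdev_s = statistics.pstdev(values)  # population stdev; the reported estimator may differ slightly
throughput = len(values) / sum(values)  # samples/s, since batch_size is 1 here

print(f"count      : {len(values)} (reported: {latency['count']})")
print(f"mean       : {mean_s:.9f} s (reported: {latency['mean']:.9f} s)")
print(f"stdev      : {stdev_s:.9f} s (reported: {latency['stdev']:.9f} s)")
print(f"throughput : {throughput:.2f} samples/s (reported: "
      f"{benchmark['report']['forward']['throughput']['value']:.2f})")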
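
For context on how a file like this is produced: optimum-benchmark 0.2.1 (per the environment block) assembles a benchmark from the backend, scenario, and launcher sections of the config and writes out the resulting config and report as JSON. The sketch below is a rough, hypothetical reconstruction using the library's Python entry points (Benchmark.launch, BenchmarkConfig, PyTorchConfig, InferenceConfig, ProcessConfig); it only mirrors fields visible in the config above, is not the exact CI script that produced this file, and constructor signatures may differ between versions.

# Hypothetical reconstruction of the launch; mirrors the "config" block above.
from optimum_benchmark import (
    Benchmark,
    BenchmarkConfig,
    InferenceConfig,
    ProcessConfig,
    PyTorchConfig,
)

if __name__ == "__main__":
    backend_config = PyTorchConfig(
        model="FacebookAI/roberta-base",
        task="multiple-choice",
        device="cuda",
        device_ids="0",
        no_weights=True,  # benchmark randomly initialized weights, no checkpoint download
    )
    scenario_config = InferenceConfig(
        latency=True,
        memory=True,
        input_shapes={"batch_size": 1, "num_choices": 2, "sequence_length": 2},
    )
    launcher_config = ProcessConfig(device_isolation=True, start_method="spawn")

    benchmark_config = BenchmarkConfig(
        name="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base",
        backend=backend_config,
        scenario=scenario_config,
        launcher=launcher_config,
    )
    benchmark_report = Benchmark.launch(benchmark_config)
    # Save the report locally; the file on the Hub also embeds the config alongside it.
    benchmark_report.save_json("benchmark_report.json")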