{
    "config": {
        "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-classification",
            "library": "transformers",
            "model": "FacebookAI/roberta-base",
            "processor": "FacebookAI/roberta-base",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.236096,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 68702699520,
            "optimum_benchmark_version": "0.3.0",
            "optimum_benchmark_commit": "748abd0c7ac21cfb1798768cad39007b466ce8e8",
            "transformers_version": "4.42.3",
            "transformers_commit": null,
            "accelerate_version": "0.31.0",
            "accelerate_commit": null,
            "diffusers_version": "0.29.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.7",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
"report": { | |
"forward": { | |
"memory": { | |
"unit": "MB", | |
"max_ram": 1009.283072, | |
"max_global_vram": 897.757184, | |
"max_process_vram": 199577.481216, | |
"max_reserved": 555.74528, | |
"max_allocated": 499.443712 | |
}, | |
"latency": { | |
"unit": "s", | |
"count": 144, | |
"total": 0.9971801538467412, | |
"mean": 0.006924862179491255, | |
"stdev": 0.0004506721662233124, | |
"p50": 0.006799014568328857, | |
"p90": 0.0072383089542388925, | |
"p95": 0.007481276130676269, | |
"p99": 0.009047464542388901, | |
"values": [ | |
0.0075162920951843265, | |
0.007142054080963135, | |
0.006784934997558594, | |
0.0067676548957824705, | |
0.006787654876708985, | |
0.006765574932098389, | |
0.006756295204162597, | |
0.006754374980926513, | |
0.006751814842224121, | |
0.0067689352035522465, | |
0.006696774959564209, | |
0.006686534881591797, | |
0.006716135025024414, | |
0.006697414875030517, | |
0.00667085599899292, | |
0.006663974761962891, | |
0.006668775081634521, | |
0.006668135166168213, | |
0.006715015888214111, | |
0.006664135932922363, | |
0.006696455001831055, | |
0.006699015140533447, | |
0.006710215091705322, | |
0.006707334995269775, | |
0.006678054809570313, | |
0.006665736198425293, | |
0.006702694892883301, | |
0.006698695182800293, | |
0.006709575176239014, | |
0.006718375205993652, | |
0.006736774921417236, | |
0.006710054874420166, | |
0.006702694892883301, | |
0.01038124179840088, | |
0.006963813781738281, | |
0.006810854911804199, | |
0.006730535984039307, | |
0.006777894973754883, | |
0.006733735084533691, | |
0.006740294933319092, | |
0.006731174945831299, | |
0.006722054958343506, | |
0.006976774215698242, | |
0.006770534992218018, | |
0.006786855220794677, | |
0.006760775089263916, | |
0.006741095066070557, | |
0.006889413833618164, | |
0.006726055145263672, | |
0.006741415023803711, | |
0.006739174842834473, | |
0.0070198140144348145, | |
0.006939333915710449, | |
0.00678077507019043, | |
0.00679981517791748, | |
0.006762535095214844, | |
0.010038043022155762, | |
0.007071333885192871, | |
0.006929094791412353, | |
0.006928935050964356, | |
0.006793254852294922, | |
0.006757894992828369, | |
0.0067602949142456055, | |
0.006739815235137939, | |
0.006827014923095703, | |
0.006761254787445069, | |
0.006765095233917237, | |
0.006798213958740235, | |
0.006763814926147461, | |
0.006737895011901855, | |
0.006760934829711914, | |
0.0067486147880554195, | |
0.0067548551559448245, | |
0.0067498950958251955, | |
0.006747175216674805, | |
0.006719494819641113, | |
0.006745894908905029, | |
0.0067486147880554195, | |
0.006763494968414306, | |
0.006709734916687012, | |
0.0067622151374816895, | |
0.006732295036315918, | |
0.00676061487197876, | |
0.006793574810028076, | |
0.006790695190429688, | |
0.006766214847564698, | |
0.006800934791564941, | |
0.0068623747825622555, | |
0.006839654922485352, | |
0.006835333824157715, | |
0.006825415134429932, | |
0.00681517505645752, | |
0.006815013885498047, | |
0.00678509521484375, | |
0.00687469482421875, | |
0.006825254917144775, | |
0.006841734886169433, | |
0.006847653865814209, | |
0.00685613489151001, | |
0.006818214893341065, | |
0.0068599748611450195, | |
0.006860935211181641, | |
0.006845414161682129, | |
0.006831974983215332, | |
0.006809094905853272, | |
0.006863814830780029, | |
0.006827494144439697, | |
0.006818534851074219, | |
0.007227333068847657, | |
0.0076705322265625, | |
0.007734372138977051, | |
0.007508131980895996, | |
0.0070462141036987305, | |
0.00728221321105957, | |
0.007174373149871826, | |
0.007200772762298584, | |
0.007171974182128906, | |
0.007296133041381836, | |
0.007671492099761963, | |
0.007656132221221924, | |
0.007156773090362549, | |
0.00724301290512085, | |
0.0072702131271362306, | |
0.007180774211883545, | |
0.007161094188690186, | |
0.007219333171844483, | |
0.007303173065185547, | |
0.007329092979431152, | |
0.007247172832489014, | |
0.007187812805175781, | |
0.007127813816070557, | |
0.007113093852996826, | |
0.006889894962310791, | |
0.006847175121307373, | |
0.006818055152893066, | |
0.006824294090270996, | |
0.006834374904632568, | |
0.006858695030212403, | |
0.006828134059906006, | |
0.0068391752243042, | |
0.006870053768157959, | |
0.006801575183868408, | |
0.00685277509689331, | |
0.0068457350730896 | |
] | |
}, | |
"throughput": { | |
"unit": "samples/s", | |
"value": 144.40720610463708 | |
}, | |
"energy": null, | |
"efficiency": null | |
} | |
} | |
} |
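
The "config" section above is the serialized optimum-benchmark configuration (backend, scenario, launcher, environment). The sketch below is a hedged illustration, not the author's actual script, of how a similar run could be expressed with the library's Python API; the class names and keyword arguments follow the optimum-benchmark README for the 0.3.x line and may differ between versions, while the values are taken from the config section.

```python
# Hedged sketch: rebuilding a configuration like the "config" section above with
# optimum-benchmark's Python API (names/arguments assumed from the project's README).
from optimum_benchmark import (
    Benchmark,
    BenchmarkConfig,
    InferenceConfig,
    ProcessConfig,
    PyTorchConfig,
)

if __name__ == "__main__":
    # Launcher: isolated "process" launcher with spawn start method (see "launcher" above)
    launcher_config = ProcessConfig(device_isolation=True, start_method="spawn")

    # Scenario: inference with latency and memory tracking on tiny inputs
    # (batch_size=1, sequence_length=2, as in "input_shapes" above)
    scenario_config = InferenceConfig(
        latency=True,
        memory=True,
        input_shapes={"batch_size": 1, "sequence_length": 2},
    )

    # Backend: PyTorch on "cuda" (a ROCm build in this run); no_weights=True benchmarks a
    # randomly initialized model instead of downloading the checkpoint weights
    backend_config = PyTorchConfig(
        model="FacebookAI/roberta-base",
        device="cuda",
        device_ids="0",
        no_weights=True,
    )

    benchmark_config = BenchmarkConfig(
        name="cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
        launcher=launcher_config,
        scenario=scenario_config,
        backend=backend_config,
    )

    # Launching returns a report object; serializing the config and report together
    # yields a benchmark.json with the same two top-level sections as this file.
    benchmark_report = Benchmark.launch(benchmark_config)
```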
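
The aggregate figures in "report.forward.latency" and "report.forward.throughput" follow from the raw per-iteration measurements in "values": the mean is total/count, and with batch_size=1 the throughput is simply forward calls per second of measured time (144 / 0.99718 s ≈ 144.41 samples/s). A minimal sanity check in plain Python, assuming this file is available locally as benchmark.json:

```python
import json
import statistics

# Load the benchmark artifact (assumed saved locally with the structure shown above)
with open("benchmark.json") as f:
    data = json.load(f)

latency = data["report"]["forward"]["latency"]
values = latency["values"]

count = len(values)                  # matches latency["count"] (144)
total = sum(values)                  # matches latency["total"] (~0.997 s)
mean = total / count                 # matches latency["mean"] (~6.92 ms)
spread = statistics.pstdev(values)   # close to latency["stdev"]; the exact estimator may differ

# With batch_size = 1, throughput is forward calls per second of measured time,
# which reproduces report.forward.throughput.value (~144.4 samples/s)
throughput = count / total

print(f"count={count} total={total:.4f}s mean={mean * 1e3:.3f}ms stdev={spread * 1e3:.3f}ms")
print(f"throughput={throughput:.2f} samples/s")
```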