{
    "config": {
        "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-classification",
            "library": "transformers",
            "model": "FacebookAI/roberta-base",
            "processor": "FacebookAI/roberta-base",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.236096,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 68702699520,
            "optimum_benchmark_version": "0.3.0",
            "optimum_benchmark_commit": "57f6495c03ea0fa48e157048c97add150dcd765c",
            "transformers_version": "4.42.3",
            "transformers_commit": null,
            "accelerate_version": "0.31.0",
            "accelerate_commit": null,
            "diffusers_version": "0.29.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.7",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1007.566848,
                "max_global_vram": 897.14688,
                "max_process_vram": 188195.753984,
                "max_reserved": 555.74528,
                "max_allocated": 499.443712
            },
            "latency": {
                "unit": "s",
                "count": 141,
                "total": 0.9976116375923155,
                "mean": 0.007075259841080252,
                "stdev": 0.00019389678959233177,
                "p50": 0.007010383129119873,
                "p90": 0.007306541919708252,
                "p95": 0.007548460960388184,
                "p99": 0.007826956272125244,
                "values": [
                    0.007880300998687743,
                    0.0069843029975891115,
                    0.0070671820640563964,
                    0.0071979031562805175,
                    0.0071595020294189455,
                    0.007049423217773437,
                    0.007020462036132813,
                    0.007030542850494385,
                    0.007027822971343994,
                    0.007019022941589356,
                    0.007019022941589356,
                    0.00698654317855835,
                    0.0069892630577087405,
                    0.007010383129119873,
                    0.007097261905670166,
                    0.007084622859954834,
                    0.0070641422271728515,
                    0.00702702283859253,
                    0.007071022987365723,
                    0.007078701972961426,
                    0.0071881418228149415,
                    0.007088623046875,
                    0.007036783218383789,
                    0.007050702095031738,
                    0.007044622898101807,
                    0.007037582874298095,
                    0.007042381763458252,
                    0.007012302875518799,
                    0.007073903083801269,
                    0.007069421768188477,
                    0.007089582920074463,
                    0.007047661781311035,
                    0.007306541919708252,
                    0.006971023082733154,
                    0.007028623104095459,
                    0.006991182804107666,
                    0.007005423069000244,
                    0.007010383129119873,
                    0.00701166296005249,
                    0.007007503032684327,
                    0.006965743064880371,
                    0.006995663166046142,
                    0.006988303184509277,
                    0.007119661808013916,
                    0.006994382858276367,
                    0.0069732627868652345,
                    0.0069563031196594235,
                    0.006997262954711914,
                    0.006990222930908203,
                    0.006999982833862305,
                    0.007155823230743408,
                    0.007012942790985108,
                    0.006967982769012451,
                    0.007327501773834229,
                    0.00700126314163208,
                    0.006987983226776123,
                    0.006999341964721679,
                    0.0069647831916809086,
                    0.00698878288269043,
                    0.007002223014831543,
                    0.00699262285232544,
                    0.0069721431732177735,
                    0.007016783237457276,
                    0.006973903179168701,
                    0.007016462802886963,
                    0.00694670295715332,
                    0.0069820618629455565,
                    0.006955022811889648,
                    0.007009102821350098,
                    0.006977743148803711,
                    0.006972302913665771,
                    0.00697454309463501,
                    0.00698878288269043,
                    0.006976462841033935,
                    0.006990222930908203,
                    0.006954863071441651,
                    0.006951182842254639,
                    0.006952942848205567,
                    0.006944942951202393,
                    0.006950383186340332,
                    0.006961583137512207,
                    0.006936142921447754,
                    0.006936623096466065,
                    0.006967342853546142,
                    0.007092623233795166,
                    0.007485261917114258,
                    0.007800940990447998,
                    0.007796620845794678,
                    0.007548460960388184,
                    0.007353261947631836,
                    0.007786860942840576,
                    0.00777406120300293,
                    0.007844299793243407,
                    0.007637901782989502,
                    0.0073044619560241695,
                    0.007286861896514892,
                    0.007324781894683838,
                    0.0073326220512390135,
                    0.007332781791687012,
                    0.0070604619979858394,
                    0.007063823223114014,
                    0.007020143032073975,
                    0.007173902988433838,
                    0.007095662117004395,
                    0.00702094316482544,
                    0.006964622974395752,
                    0.00697678279876709,
                    0.0069748620986938474,
                    0.006958383083343506,
                    0.006949742794036865,
                    0.006977743148803711,
                    0.00696638298034668,
                    0.006989743232727051,
                    0.006972302913665771,
                    0.006976142883300781,
                    0.0069635028839111325,
                    0.007032302856445313,
                    0.006967662811279297,
                    0.007014381885528565,
                    0.0070033431053161625,
                    0.007015343189239502,
                    0.00699310302734375,
                    0.006993742942810058,
                    0.007072463035583496,
                    0.007028462886810302,
                    0.00701166296005249,
                    0.007011982917785644,
                    0.0069684629440307615,
                    0.006977423191070557,
                    0.006980943202972412,
                    0.007004942893981934,
                    0.007035182952880859,
                    0.007051982879638672,
                    0.007032143115997314,
                    0.0069844632148742675,
                    0.0069673418998718265,
                    0.006992143154144287,
                    0.006993903160095215,
                    0.007015663146972656,
                    0.006987823009490967,
                    0.0069990229606628415
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 141.33756532782263
            },
            "energy": null,
            "efficiency": null
        }
    }
}
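
For reference, the summary statistics in the report above can be re-derived from the raw latency samples. The short Python sketch below is a minimal illustration, assuming the JSON shown here has been saved locally as "benchmark.json" (a hypothetical path); it uses only the standard library and the key names visible in this report.

import json
import statistics

# Assumed local copy of the report above (hypothetical path).
with open("benchmark.json") as f:
    benchmark = json.load(f)

latency = benchmark["report"]["forward"]["latency"]
values = latency["values"]  # per-forward-pass latencies, in seconds

# The summary fields are aggregates of the raw samples.
assert len(values) == latency["count"]           # 141 samples
print("mean (s):", statistics.mean(values))      # ~0.0070753, equals "total" / "count"
print("median (s):", statistics.median(values))  # compare with the reported "p50"

# Throughput is reported in samples/s: batch_size * count / total latency.
batch_size = benchmark["config"]["scenario"]["input_shapes"]["batch_size"]
print("throughput:", batch_size * latency["count"] / latency["total"])  # ~141.34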