{
    "config": {
        "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "text-classification",
            "library": "transformers",
            "model": "FacebookAI/roberta-base",
            "processor": "FacebookAI/roberta-base",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.236096,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 68702699520,
            "optimum_benchmark_version": "0.2.1",
            "optimum_benchmark_commit": "074fc31a7e13e70facbebbf8b363a0c312f75f69",
            "transformers_version": "4.42.3",
            "transformers_commit": null,
            "accelerate_version": "0.31.0",
            "accelerate_commit": null,
            "diffusers_version": "0.29.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.7",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1011.126272,
                "max_global_vram": 897.14688,
                "max_process_vram": 173018.673152,
                "max_reserved": 555.74528,
                "max_allocated": 499.443712
            },
            "latency": {
                "unit": "s",
                "count": 130,
                "total": 0.9962900304794309,
                "mean": 0.007663769465226394,
                "stdev": 0.0003079632197259394,
                "p50": 0.007585484027862549,
                "p90": 0.007923450946807862,
                "p95": 0.007962962436676026,
                "p99": 0.008506558675765991,
                "values": [
                    0.007739963054656983,
                    0.007505885124206543,
                    0.007603484153747559,
                    0.00768860387802124,
                    0.007718204021453857,
                    0.0077044439315795896,
                    0.007718842983245849,
                    0.007763644218444824,
                    0.007672284126281738,
                    0.007655004024505615,
                    0.007663643836975097,
                    0.007642044067382813,
                    0.007631964206695556,
                    0.007640443801879883,
                    0.0076519641876220705,
                    0.007649084091186523,
                    0.007671644210815429,
                    0.007668764114379883,
                    0.007652763843536377,
                    0.007634684085845948,
                    0.0076660442352294925,
                    0.007651644229888916,
                    0.007734364032745362,
                    0.007669243812561035,
                    0.007691483974456787,
                    0.007640604019165039,
                    0.007692284107208252,
                    0.00763868522644043,
                    0.0076622037887573245,
                    0.00764220380783081,
                    0.007654843807220459,
                    0.007633563995361328,
                    0.007618683815002442,
                    0.007640443801879883,
                    0.007689243793487549,
                    0.0076622037887573245,
                    0.00763948392868042,
                    0.00763612413406372,
                    0.0076617240905761715,
                    0.007618524074554443,
                    0.010554671287536621,
                    0.007602203845977784,
                    0.007498844146728516,
                    0.007503964900970459,
                    0.0075007638931274415,
                    0.007533724784851074,
                    0.00751612377166748,
                    0.0075479640960693355,
                    0.007519004821777343,
                    0.007544603824615479,
                    0.007660443782806396,
                    0.007649563789367676,
                    0.007640764236450195,
                    0.007540444850921631,
                    0.007534843921661377,
                    0.007557884216308593,
                    0.007546685218811035,
                    0.007578043937683105,
                    0.007539484024047851,
                    0.007513565063476562,
                    0.00753356409072876,
                    0.0075436439514160155,
                    0.0075385251045227055,
                    0.007519003868103028,
                    0.007544603824615479,
                    0.007603484153747559,
                    0.00757980489730835,
                    0.0075574049949646,
                    0.00754172420501709,
                    0.007555963993072509,
                    0.007586044788360596,
                    0.007499965190887451,
                    0.0075668439865112305,
                    0.007500124931335449,
                    0.0075670042037963865,
                    0.007545083999633789,
                    0.007535484790802002,
                    0.007513564109802246,
                    0.007585723876953125,
                    0.007551004886627197,
                    0.007583004951477051,
                    0.007584444999694824,
                    0.007585244178771973,
                    0.007589724063873291,
                    0.007527324199676513,
                    0.0075532450675964355,
                    0.007570044994354248,
                    0.007540443897247315,
                    0.008049081802368165,
                    0.00846012020111084,
                    0.008478839874267579,
                    0.008517880439758301,
                    0.00794156312942505,
                    0.007927002906799317,
                    0.007923162937164307,
                    0.007926043033599853,
                    0.007967642784118652,
                    0.007957242012023925,
                    0.007992763042449951,
                    0.00793116283416748,
                    0.007955482006072998,
                    0.007743484020233155,
                    0.0076097240447998045,
                    0.007567644119262695,
                    0.0074938850402832035,
                    0.0075577239990234375,
                    0.007580763816833496,
                    0.007545723915100098,
                    0.007541725158691406,
                    0.007514523983001709,
                    0.007505245208740234,
                    0.007535164833068848,
                    0.007545244216918945,
                    0.0075623650550842286,
                    0.007526525020599365,
                    0.0075575637817382815,
                    0.007540605068206787,
                    0.00754156494140625,
                    0.007561563968658447,
                    0.0075479640960693355,
                    0.007568445205688477,
                    0.007560285091400146,
                    0.007573723793029785,
                    0.00757884407043457,
                    0.007548603057861328,
                    0.007540444850921631,
                    0.00754444408416748,
                    0.007557404041290283,
                    0.007618363857269287,
                    0.0075858840942382814
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 130.48409200425488
            },
            "energy": null,
            "efficiency": null
        }
    }
}
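
The aggregate figures in `report.forward.latency` and `report.forward.throughput` can be recomputed directly from the raw per-forward latencies in `values`. Below is a minimal Python sketch, using only the standard library, that parses a locally saved copy of this report and cross-checks the mean latency and the samples/s throughput; the file path `benchmark.json` is illustrative, and the report's own `stdev` and percentiles may come from a different estimator (e.g. a population standard deviation or interpolated percentiles), so only mean, total, and throughput are compared here.

```python
import json
import statistics

# Illustrative path: save the JSON report above locally as benchmark.json.
with open("benchmark.json") as f:
    report = json.load(f)

latency = report["report"]["forward"]["latency"]
values = latency["values"]  # per-forward-call latencies, in seconds
batch_size = report["config"]["scenario"]["input_shapes"]["batch_size"]

total = sum(values)
mean = statistics.mean(values)

# With batch_size == 1, each forward call processes one sample, so
# throughput in samples/s is (number of calls * batch_size) / total time.
throughput = len(values) * batch_size / total

print(f"count     : {len(values)} (reported {latency['count']})")
print(f"total [s] : {total:.6f} (reported {latency['total']:.6f})")
print(f"mean  [s] : {mean:.9f} (reported {latency['mean']:.9f})")
print(f"samples/s : {throughput:.3f} (reported "
      f"{report['report']['forward']['throughput']['value']:.3f})")
```

With the 130 values listed above, this yields roughly 0.00766 s mean latency and about 130.48 samples/s, matching the stored aggregates.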