{
    "config": {
        "name": "cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base",
        "backend": {
            "name": "pytorch",
            "version": "2.2.2+rocm5.7",
            "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
            "task": "multiple-choice",
            "library": "transformers",
            "model": "FacebookAI/roberta-base",
            "processor": "FacebookAI/roberta-base",
            "device": "cuda",
            "device_ids": "0",
            "seed": 42,
            "inter_op_num_threads": null,
            "intra_op_num_threads": null,
            "model_kwargs": {},
            "processor_kwargs": {},
            "hub_kwargs": {},
            "no_weights": true,
            "device_map": null,
            "torch_dtype": null,
            "eval_mode": true,
            "to_bettertransformer": false,
            "low_cpu_mem_usage": null,
            "attn_implementation": null,
            "cache_implementation": null,
            "autocast_enabled": false,
            "autocast_dtype": null,
            "torch_compile": false,
            "torch_compile_target": "forward",
            "torch_compile_config": {},
            "quantization_scheme": null,
            "quantization_config": {},
            "deepspeed_inference": false,
            "deepspeed_inference_config": {},
            "peft_type": null,
            "peft_config": {}
        },
        "scenario": {
            "name": "inference",
            "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
            "iterations": 1,
            "duration": 1,
            "warmup_runs": 1,
            "input_shapes": {
                "batch_size": 1,
                "num_choices": 2,
                "sequence_length": 2
            },
            "new_tokens": null,
            "latency": true,
            "memory": true,
            "energy": false,
            "forward_kwargs": {},
            "generate_kwargs": {
                "max_new_tokens": 2,
                "min_new_tokens": 2
            },
            "call_kwargs": {
                "num_inference_steps": 2
            }
        },
        "launcher": {
            "name": "process",
            "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
            "device_isolation": true,
            "device_isolation_action": "error",
            "numactl": false,
            "numactl_kwargs": {},
            "start_method": "spawn"
        },
        "environment": {
            "cpu": " AMD EPYC 7763 64-Core Processor",
            "cpu_count": 128,
            "cpu_ram_mb": 1082015.236096,
            "system": "Linux",
            "machine": "x86_64",
            "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
            "processor": "x86_64",
            "python_version": "3.10.12",
            "gpu": [
                "Advanced Micro Devices, Inc. [AMD/ATI]"
            ],
            "gpu_count": 1,
            "gpu_vram_mb": 68702699520,
            "optimum_benchmark_version": "0.2.1",
            "optimum_benchmark_commit": "156844ab796ad7cf3da92a0bf30b174d1bcc0aa5",
            "transformers_version": "4.42.3",
            "transformers_commit": null,
            "accelerate_version": "0.31.0",
            "accelerate_commit": null,
            "diffusers_version": "0.29.2",
            "diffusers_commit": null,
            "optimum_version": null,
            "optimum_commit": null,
            "timm_version": "1.0.7",
            "timm_commit": null,
            "peft_version": null,
            "peft_commit": null
        }
    },
    "report": {
        "forward": {
            "memory": {
                "unit": "MB",
                "max_ram": 1005.993984,
                "max_global_vram": 898.469888,
                "max_process_vram": 194605.273088,
                "max_reserved": 555.74528,
                "max_allocated": 499.507712
            },
            "latency": {
                "unit": "s",
                "count": 119,
                "total": 0.9990508728027346,
                "mean": 0.008395385485737265,
                "stdev": 0.00029401200417222866,
                "p50": 0.008298561096191407,
                "p90": 0.008774881362915038,
                "p95": 0.00903904104232788,
                "p99": 0.009166138496398926,
                "values": [
                    0.009830561637878418,
                    0.00903520107269287,
                    0.00910736083984375,
                    0.009179040908813477,
                    0.009099202156066894,
                    0.009099201202392578,
                    0.00907360076904297,
                    0.008912800788879394,
                    0.008839040756225587,
                    0.008847681999206543,
                    0.008766401290893554,
                    0.008739200592041016,
                    0.008736801147460938,
                    0.00836112117767334,
                    0.008302241325378419,
                    0.008281121253967285,
                    0.008308482170104981,
                    0.008398880958557129,
                    0.008507680892944337,
                    0.008500480651855468,
                    0.008431361198425294,
                    0.008371841430664063,
                    0.008387201309204102,
                    0.008373762130737305,
                    0.008311841011047363,
                    0.008314081192016601,
                    0.008298561096191407,
                    0.00829664134979248,
                    0.008432320594787598,
                    0.008747361183166503,
                    0.00865088176727295,
                    0.008485441207885741,
                    0.00846880054473877,
                    0.008577760696411132,
                    0.008609601020812988,
                    0.008604480743408202,
                    0.00856432056427002,
                    0.008537601470947266,
                    0.008502560615539551,
                    0.008446721076965332,
                    0.00843440055847168,
                    0.008408161163330078,
                    0.008615361213684083,
                    0.008808801651000977,
                    0.008716160774230958,
                    0.008574081420898437,
                    0.008675361633300781,
                    0.008732160568237305,
                    0.008890082359313965,
                    0.008706561088562012,
                    0.008616162300109862,
                    0.008451360702514649,
                    0.00836112117767334,
                    0.008352481842041015,
                    0.008322561264038086,
                    0.00823760223388672,
                    0.008312001228332519,
                    0.00829072093963623,
                    0.008276802062988281,
                    0.008258400917053222,
                    0.00834208106994629,
                    0.008286722183227539,
                    0.008212640762329101,
                    0.008228161811828612,
                    0.00826064109802246,
                    0.00833712100982666,
                    0.008208802223205566,
                    0.008322720527648925,
                    0.008501280784606933,
                    0.008427681922912598,
                    0.008353760719299316,
                    0.008247041702270507,
                    0.008225761413574218,
                    0.008245281219482422,
                    0.008243362426757812,
                    0.00823552131652832,
                    0.008232320785522461,
                    0.008242881774902344,
                    0.008180960655212403,
                    0.008267842292785644,
                    0.008249441146850585,
                    0.008285120964050293,
                    0.008216002464294433,
                    0.00824592113494873,
                    0.008223681449890137,
                    0.008228482246398926,
                    0.00820112133026123,
                    0.008182722091674805,
                    0.00813568115234375,
                    0.008132000923156739,
                    0.008150881767272949,
                    0.008136160850524902,
                    0.008156961441040039,
                    0.008139202117919922,
                    0.008144001007080078,
                    0.008151361465454102,
                    0.008158721923828126,
                    0.0081264009475708,
                    0.008134720802307129,
                    0.00813888168334961,
                    0.008158560752868652,
                    0.008161601066589355,
                    0.008148801803588868,
                    0.008150561332702637,
                    0.00814256191253662,
                    0.008136481285095214,
                    0.008112960815429688,
                    0.008121602058410645,
                    0.008147200584411622,
                    0.008142721176147462,
                    0.008168961524963379,
                    0.00815872097015381,
                    0.008118560791015625,
                    0.008136801719665528,
                    0.008105120658874511,
                    0.008096800804138184,
                    0.008096482276916504,
                    0.008108000755310058,
                    0.00811872100830078
                ]
            },
            "throughput": {
                "unit": "samples/s",
                "value": 119.11305343856789
            },
            "energy": null,
            "efficiency": null
        }
    }
}