[
{
"model": "google/gemma-2b",
"commit": "48681e6e5ef08e5af8b3feb5683f044321605f1f",
"config": "backend.cache_implementation=null,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.019090847969055177,
"prefill.throughput.value": 366.66784059809555,
"decode.latency.mean": 2.3359691162109373,
"decode.throughput.value": 54.36715713348153,
"per_token.latency.mean": 0.018466484107518857,
"per_token.throughput.value": 54.15215989018926
}
},
{
"model": "google/gemma-2b",
"commit": "48681e6e5ef08e5af8b3feb5683f044321605f1f",
"config": "backend.cache_implementation=static,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.021794272422790528,
"prefill.throughput.value": 321.1853033772312,
"decode.latency.mean": 2.5525396728515624,
"decode.throughput.value": 49.7543686982629,
"per_token.latency.mean": 0.020181032497421085,
"per_token.throughput.value": 49.55147860387168
}
},
{
"model": "google/gemma-2b",
"commit": "48681e6e5ef08e5af8b3feb5683f044321605f1f",
"config": "backend.cache_implementation=static,backend.torch_compile=True",
"metrics": {
"prefill.latency.mean": 0.013933776378631592,
"prefill.throughput.value": 502.37637018023224,
"decode.latency.mean": 1.5579072265625,
"decode.throughput.value": 81.51961672340636,
"per_token.latency.mean": 0.012319213772950909,
"per_token.throughput.value": 81.17401146132258
}
}
]