[
    {
        "model": "google/gemma-2b",
        "commit": "c6b23fda65f9ae74f9a1026b340241f65aebe1a3",
        "config": "backend.cache_implementation=null,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.02075985622406006,
            "prefill.throughput.value": 337.1892331261527,
            "decode.latency.mean": 2.495531494140625,
            "decode.throughput.value": 50.89096262587318,
            "per_token.latency.mean": 0.01964858857853206,
            "per_token.throughput.value": 50.89424087654797
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "c6b23fda65f9ae74f9a1026b340241f65aebe1a3",
        "config": "backend.cache_implementation=static,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.02313372802734375,
            "prefill.throughput.value": 302.5884972679758,
            "decode.latency.mean": 2.6958218994140624,
            "decode.throughput.value": 47.10993705763852,
            "per_token.latency.mean": 0.02122594771047277,
            "per_token.throughput.value": 47.11214847224962
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "c6b23fda65f9ae74f9a1026b340241f65aebe1a3",
        "config": "backend.cache_implementation=static,backend.torch_compile=True",
        "metrics": {
            "prefill.latency.mean": 0.014197376251220703,
            "prefill.throughput.value": 493.0488476276124,
            "decode.latency.mean": 1.5657416381835936,
            "decode.throughput.value": 81.11172169332602,
            "per_token.latency.mean": 0.012327540893254317,
            "per_token.throughput.value": 81.11917929610797
        }
    }
]