[
    {
        "model": "google/gemma-2b",
        "commit": "0cf60f13ab1c857c17fc3fb127129048c93bf06c",
        "config": "backend.cache_implementation=null,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.020097536087036134,
            "prefill.throughput.value": 348.3014022059815,
            "decode.latency.mean": 2.4510888671875,
            "decode.throughput.value": 51.8137068386778,
            "per_token.latency.mean": 0.019379450899810188,
            "per_token.throughput.value": 51.60104923353605
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "0cf60f13ab1c857c17fc3fb127129048c93bf06c",
        "config": "backend.cache_implementation=static,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.02203115177154541,
            "prefill.throughput.value": 317.7319130923028,
            "decode.latency.mean": 2.5429818115234375,
            "decode.throughput.value": 49.941371748906626,
            "per_token.latency.mean": 0.02010650309083961,
            "per_token.throughput.value": 49.73515262609704
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "0cf60f13ab1c857c17fc3fb127129048c93bf06c",
        "config": "backend.cache_implementation=static,backend.torch_compile=True",
        "metrics": {
            "prefill.latency.mean": 0.014126336097717285,
            "prefill.throughput.value": 495.52834872243693,
            "decode.latency.mean": 1.5611622314453126,
            "decode.throughput.value": 81.34964928175614,
            "per_token.latency.mean": 0.012345619333591386,
            "per_token.throughput.value": 81.00039155419968
        }
    }
]