{
    "google/gemma-2b": {
        "backend.cache_implementation=null,backend.torch_compile=False": {
            "970a16ec7f693104085fd826523e5c6ce64f2040": {
                "metrics": {
                    "prefill.latency.mean": 0.020231648445129397,
                    "prefill.throughput.value": 345.9925679800547,
                    "decode.latency.mean": 2.4315745849609374,
                    "decode.throughput.value": 52.2295309325419,
                    "per_token.latency.mean": 0.019144986190195157,
                    "per_token.throughput.value": 52.232996674196414
                }
            }
        },
        "backend.cache_implementation=static,backend.torch_compile=False": {
            "970a16ec7f693104085fd826523e5c6ce64f2040": {
                "metrics": {
                    "prefill.latency.mean": 0.021830528259277344,
                    "prefill.throughput.value": 320.65188331048296,
                    "decode.latency.mean": 2.566541015625,
                    "decode.throughput.value": 49.48294191553107,
                    "per_token.latency.mean": 0.020207986861702026,
                    "per_token.throughput.value": 49.48538450879489
                }
            }
        },
        "backend.cache_implementation=static,backend.torch_compile=True": {
            "970a16ec7f693104085fd826523e5c6ce64f2040": {
                "metrics": {
                    "prefill.latency.mean": 0.014175551891326904,
                    "prefill.throughput.value": 493.8079345103201,
                    "decode.latency.mean": 1.565439697265625,
                    "decode.throughput.value": 81.12736646568541,
                    "per_token.latency.mean": 0.012325130297443058,
                    "per_token.throughput.value": 81.13504489339618
                }
            }
        }
    }
}