[
    {
        "model": "google/gemma-2b",
        "commit": "940fde8dafaecb8f17b588c5078291f1c1a420c8",
        "config": "backend.cache_implementation=null,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.02033520030975342,
            "prefill.throughput.value": 344.2306883322204,
            "decode.latency.mean": 2.4782998046875,
            "decode.throughput.value": 51.244808945144555,
            "per_token.latency.mean": 0.01958842790173919,
            "per_token.throughput.value": 51.05054908011344
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "940fde8dafaecb8f17b588c5078291f1c1a420c8",
        "config": "backend.cache_implementation=static,backend.torch_compile=False",
        "metrics": {
            "prefill.latency.mean": 0.025318976402282714,
            "prefill.throughput.value": 276.4724722192518,
            "decode.latency.mean": 2.537893798828125,
            "decode.throughput.value": 50.04149506123636,
            "per_token.latency.mean": 0.020059904995643103,
            "per_token.throughput.value": 49.85068474737015
        }
    },
    {
        "model": "google/gemma-2b",
        "commit": "940fde8dafaecb8f17b588c5078291f1c1a420c8",
        "config": "backend.cache_implementation=static,backend.torch_compile=True",
        "metrics": {
            "prefill.latency.mean": 0.01400380802154541,
            "prefill.throughput.value": 499.86403621288036,
            "decode.latency.mean": 1.5594651489257814,
            "decode.throughput.value": 81.4381777543938,
            "per_token.latency.mean": 0.012331704181173573,
            "per_token.throughput.value": 81.09179277318934
        }
    }
]