{
    "google/gemma-2b": {
        "backend.cache_implementation=null,backend.torch_compile=False": {
            "0a7af19f4dc868bafc82f35eb7e8d13bac87a594": {
                "metrics": {
                    "prefill.latency.mean": 0.020197423934936523,
                    "prefill.throughput.value": 346.57885196397444,
                    "decode.latency.mean": 2.43524609375,
                    "decode.throughput.value": 52.15078686541882,
                    "per_token.latency.mean": 0.019173916261027178,
                    "per_token.throughput.value": 52.154186259413045
                }
            }
        },
        "backend.cache_implementation=static,backend.torch_compile=False": {
            "0a7af19f4dc868bafc82f35eb7e8d13bac87a594": {
                "metrics": {
                    "prefill.latency.mean": 0.02142367935180664,
                    "prefill.throughput.value": 326.7412606887106,
                    "decode.latency.mean": 2.5716962890625,
                    "decode.throughput.value": 49.3837474277716,
                    "per_token.latency.mean": 0.020248555844224345,
                    "per_token.throughput.value": 49.38623809486334
                }
            }
        },
        "backend.cache_implementation=static,backend.torch_compile=True": {
            "0a7af19f4dc868bafc82f35eb7e8d13bac87a594": {
                "metrics": {
                    "prefill.latency.mean": 0.014151840209960936,
                    "prefill.throughput.value": 494.6353192338173,
                    "decode.latency.mean": 1.564446533203125,
                    "decode.throughput.value": 81.17886888724406,
                    "per_token.latency.mean": 0.012317361369846375,
                    "per_token.throughput.value": 81.18621918879954
                }
            }
        }
    }
}