{
    "google/gemma-2b": {
        "backend.cache_implementation=null,backend.torch_compile=False": {
            "c409cd81777fb27aadc043ed3d8339dbc020fb3b": {
                "metrics": {
                    "prefill.latency.mean": 0.020490752220153806,
                    "prefill.throughput.value": 341.61752212859744,
                    "decode.latency.mean": 2.4355491943359375,
                    "decode.throughput.value": 52.14429677517849,
                    "per_token.latency.mean": 0.019176319017185,
                    "per_token.throughput.value": 52.14765143945731
                }
            }
        },
        "backend.cache_implementation=static,backend.torch_compile=False": {
            "c409cd81777fb27aadc043ed3d8339dbc020fb3b": {
                "metrics": {
                    "prefill.latency.mean": 0.021842111587524415,
                    "prefill.throughput.value": 320.48183491554903,
                    "decode.latency.mean": 2.61135791015625,
                    "decode.throughput.value": 48.63370107409022,
                    "per_token.latency.mean": 0.020560823410514767,
                    "per_token.throughput.value": 48.636184457894906
                }
            }
        },
        "backend.cache_implementation=static,backend.torch_compile=True": {
            "c409cd81777fb27aadc043ed3d8339dbc020fb3b": {
                "metrics": {
                    "prefill.latency.mean": 0.014161775588989258,
                    "prefill.throughput.value": 494.2883013513137,
                    "decode.latency.mean": 1.5649481201171875,
                    "decode.throughput.value": 81.15284996827236,
                    "per_token.latency.mean": 0.01232130015741183,
                    "per_token.throughput.value": 81.16026614272958
                }
            }
        }
    }
}