[
{
"model": "google/gemma-2b",
"commit": "cff06aac6fad28019930be03f5d467055bf62177",
"config": "backend.cache_implementation=null,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.02121953582763672,
"prefill.throughput.value": 329.8846900733365,
"decode.latency.mean": 2.5355677490234374,
"decode.throughput.value": 50.08740154898779,
"per_token.latency.mean": 0.019963835438405433,
"per_token.throughput.value": 50.0905751845785
}
},
{
"model": "google/gemma-2b",
"commit": "cff06aac6fad28019930be03f5d467055bf62177",
"config": "backend.cache_implementation=static,backend.torch_compile=False",
"metrics": {
"prefill.latency.mean": 0.02198051166534424,
"prefill.throughput.value": 318.4639241604466,
"decode.latency.mean": 2.5951973876953125,
"decode.throughput.value": 48.93654740951456,
"per_token.latency.mean": 0.020433629719291146,
"per_token.throughput.value": 48.93893124900428
}
},
{
"model": "google/gemma-2b",
"commit": "cff06aac6fad28019930be03f5d467055bf62177",
"config": "backend.cache_implementation=static,backend.torch_compile=True",
"metrics": {
"prefill.latency.mean": 0.014191952228546143,
"prefill.throughput.value": 493.2372859824019,
"decode.latency.mean": 1.5656904296874998,
"decode.throughput.value": 81.11437458639142,
"per_token.latency.mean": 0.012327145779226709,
"per_token.throughput.value": 81.12177935667528
}
}
]