benchmark_results/2024-09-08/summaries.json
[
  {
    "model": "google/gemma-2b",
    "commit": "66bc4def9505fa7c7fe4aa7a248c34a026bb552b",
    "config": "backend.cache_implementation=null,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.02079647922515869,
      "prefill.throughput.value": 336.59543638192855,
      "decode.latency.mean": 2.515798828125,
      "decode.throughput.value": 50.4809838450604,
      "per_token.latency.mean": 0.019808215719508374,
      "per_token.throughput.value": 50.48410286723288
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "66bc4def9505fa7c7fe4aa7a248c34a026bb552b",
    "config": "backend.cache_implementation=static,backend.torch_compile=False",
    "metrics": {
      "prefill.latency.mean": 0.022945216178894043,
      "prefill.throughput.value": 305.0744846082073,
      "decode.latency.mean": 2.6843699951171875,
      "decode.throughput.value": 47.31091475132352,
      "per_token.latency.mean": 0.02113573500115102,
      "per_token.throughput.value": 47.31323514159983
    }
  },
  {
    "model": "google/gemma-2b",
    "commit": "66bc4def9505fa7c7fe4aa7a248c34a026bb552b",
    "config": "backend.cache_implementation=static,backend.torch_compile=True",
    "metrics": {
      "prefill.latency.mean": 0.014140079975128174,
      "prefill.throughput.value": 495.0467049912529,
      "decode.latency.mean": 1.5638016357421876,
      "decode.throughput.value": 81.21234630869613,
      "per_token.latency.mean": 0.01231224945398766,
      "per_token.throughput.value": 81.21992684903915
    }
  }
]
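
The file is a JSON array of benchmark runs, one entry per backend configuration, each with a "model", "commit", "config" string, and a flat "metrics" map. Below is a minimal sketch, not part of the benchmark suite, of how this structure might be read and compared in Python; the field names come from the entries above, while the local filename "summaries.json" and the choice to compare decode throughput are illustrative assumptions.

# read_summaries.py -- illustrative sketch; assumes the JSON above was saved
# locally as "summaries.json" (hypothetical path, not defined by the file itself).
import json

with open("summaries.json") as f:
    summaries = json.load(f)

# Index each run by its config string so the three configurations can be compared directly.
by_config = {entry["config"]: entry["metrics"] for entry in summaries}

baseline = by_config["backend.cache_implementation=null,backend.torch_compile=False"]
compiled = by_config["backend.cache_implementation=static,backend.torch_compile=True"]

# Throughput values are tokens per second, so higher is better.
speedup = compiled["decode.throughput.value"] / baseline["decode.throughput.value"]
print(f"Decode throughput speedup (static cache + torch.compile vs. eager): {speedup:.2f}x")

With the numbers recorded above, this prints roughly a 1.6x decode throughput speedup for the static-cache, torch.compile=True run over the eager baseline.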