{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 54.04956970214844,
"base_token_generation_latency_async": 54.09220550209284,
"base_token_generation_throughput_sync": 0.01850153489677552,
"base_token_generation_throughput_async": 0.018486951876297775,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 53.23417663574219,
"base_inference_latency_async": 52.190279960632324,
"base_inference_throughput_sync": 0.018784924708097873,
"base_inference_throughput_async": 0.01916065598334231,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 40.57583160400391,
"smashed_token_generation_latency_async": 40.6801862642169,
"smashed_token_generation_throughput_sync": 0.024645212691126284,
"smashed_token_generation_throughput_async": 0.024581991672924564,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 51.8927360534668,
"smashed_inference_latency_async": 41.21739864349365,
"smashed_inference_throughput_sync": 0.019270519846355125,
"smashed_inference_throughput_async": 0.02426159905552056,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}