{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 8.405904769897461,
"base_token_generation_latency_sync": 35.634182929992676,
"base_token_generation_latency_async": 36.07999123632908,
"base_token_generation_throughput_sync": 0.02806294175355757,
"base_token_generation_throughput_async": 0.027716192984911157,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 120.22138977050781,
"base_inference_latency_async": 39.18566703796387,
"base_inference_throughput_sync": 0.008317987355735225,
"base_inference_throughput_async": 0.02551953496239275,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 8.853601455688477,
"smashed_token_generation_latency_sync": 40.49961433410645,
"smashed_token_generation_latency_async": 40.31131472438574,
"smashed_token_generation_throughput_sync": 0.02469159315321819,
"smashed_token_generation_throughput_async": 0.02480693092837939,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 163.6304901123047,
"smashed_inference_latency_async": 70.32005786895752,
"smashed_inference_throughput_sync": 0.006111330469728893,
"smashed_inference_throughput_async": 0.014220693644244647,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}