{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_token_generation_latency_sync": 83.92937774658203,
"base_token_generation_latency_async": 83.90798717737198,
"base_token_generation_throughput_sync": 0.011914779149434649,
"base_token_generation_throughput_async": 0.011917816570740916,
"base_token_generation_CO2_emissions": 2.0898577111994714e-05,
"base_token_generation_energy_consumption": 0.006837025531716293,
"base_inference_latency_sync": 81.4477310180664,
"base_inference_latency_async": 80.29217720031738,
"base_inference_throughput_sync": 0.012277812868454053,
"base_inference_throughput_async": 0.012454513439150423,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_token_generation_latency_sync": 209.10842590332032,
"smashed_token_generation_latency_async": 208.9206464588642,
"smashed_token_generation_throughput_sync": 0.004782208061105784,
"smashed_token_generation_throughput_async": 0.004786506345589432,
"smashed_token_generation_CO2_emissions": 6.253313396231958e-05,
"smashed_token_generation_energy_consumption": 0.0220396598549638,
"smashed_inference_latency_sync": 206.4805877685547,
"smashed_inference_latency_async": 205.32312393188477,
"smashed_inference_throughput_sync": 0.004843070289595,
"smashed_inference_throughput_async": 0.004870372030437968
}