{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 10.554253578186035,
"base_token_generation_latency_sync": 38.07420387268066,
"base_token_generation_latency_async": 37.691137939691544,
"base_token_generation_throughput_sync": 0.026264501901181674,
"base_token_generation_throughput_async": 0.02653143562818586,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 119.51892547607422,
"base_inference_latency_async": 38.97833824157715,
"base_inference_throughput_sync": 0.008366875756427245,
"base_inference_throughput_async": 0.02565527534299363,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 357458.53125,
"smashed_token_generation_latency_sync": 168.46778411865233,
"smashed_token_generation_latency_async": 168.45296267420053,
"smashed_token_generation_throughput_sync": 0.00593585298952883,
"smashed_token_generation_throughput_async": 0.005936375259448941,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 262.0371948242188,
"smashed_inference_latency_async": 209.70721244812012,
"smashed_inference_throughput_sync": 0.0038162521189819085,
"smashed_inference_throughput_async": 0.0047685532048517025,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}