{
"base_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"base_current_gpu_total_memory": 40339.3125,
"base_perplexity": 11.945297241210938,
"base_token_generation_latency_sync": 38.85285110473633,
"base_token_generation_latency_async": 38.682299479842186,
"base_token_generation_throughput_sync": 0.02573813688226591,
"base_token_generation_throughput_async": 0.025851617237002987,
"base_token_generation_CO2_emissions": null,
"base_token_generation_energy_consumption": null,
"base_inference_latency_sync": 122.01093063354492,
"base_inference_latency_async": 38.599491119384766,
"base_inference_throughput_sync": 0.008195986989095764,
"base_inference_throughput_async": 0.025907077295581168,
"base_inference_CO2_emissions": null,
"base_inference_energy_consumption": null,
"smashed_current_gpu_type": "NVIDIA A100-PCIE-40GB",
"smashed_current_gpu_total_memory": 40339.3125,
"smashed_perplexity": 13.2249116897583,
"smashed_token_generation_latency_sync": 62.686103439331056,
"smashed_token_generation_latency_async": 63.241347298026085,
"smashed_token_generation_throughput_sync": 0.015952498961238216,
"smashed_token_generation_throughput_async": 0.015812439847106362,
"smashed_token_generation_CO2_emissions": null,
"smashed_token_generation_energy_consumption": null,
"smashed_inference_latency_sync": 198.40460815429688,
"smashed_inference_latency_async": 110.20867824554443,
"smashed_inference_throughput_sync": 0.0050402055138876216,
"smashed_inference_throughput_async": 0.009073695610176945,
"smashed_inference_CO2_emissions": null,
"smashed_inference_energy_consumption": null
}