gpt2-xl-lora-multi-6 / all_results.json
MHGanainy/gpt2-xl-lora-multi-6
{
  "epoch": 1.0,
  "eval_loss": 2.448012590408325,
  "eval_runtime": 1408.7239,
  "eval_samples_per_second": 8.454,
  "eval_steps_per_second": 0.529,
  "perplexity": 11.56533879845621,
  "total_flos": 8.45892909419987e+17,
  "train_loss": 2.683517217184325,
  "train_runtime": 1637.9851,
  "train_samples_per_second": 56.701,
  "train_steps_per_second": 3.544
}
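
The reported perplexity is the exponential of the evaluation loss (exp(2.448012...) ≈ 11.5653). Below is a minimal Python sketch that loads a local copy of this file and recomputes perplexity from eval_loss to confirm the relationship; the file path "all_results.json" is assumed.

import json
import math

# Load the trainer's summary metrics (assumes a local copy of all_results.json).
with open("all_results.json") as f:
    results = json.load(f)

# Perplexity is exp(cross-entropy loss); this should reproduce the stored value.
perplexity = math.exp(results["eval_loss"])
print(f"eval_loss:  {results['eval_loss']:.6f}")
print(f"perplexity: {perplexity:.6f}")  # ~11.565339, matching "perplexity" above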