tengxiao1/Meta-Llama-3-8B-Base-MI-6e-7 / all_results.json
{
"epoch": 0.998691442030882,
"total_flos": 0.0,
"train_loss": 2.0149041866606385,
"train_runtime": 17499.3556,
"train_samples": 61135,
"train_samples_per_second": 3.494,
"train_steps_per_second": 0.027
}
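The JSON above is the end-of-run summary that the Hugging Face Trainer writes to all_results.json. A minimal sketch of reading it back and turning the logged fields into human-readable numbers is shown below; the file path is an assumption about where the file has been downloaded.

```python
import json

# Load the trainer summary; "all_results.json" is assumed to be in the
# current working directory after downloading it from the repo.
with open("all_results.json") as f:
    results = json.load(f)

# Derive a few readable figures from the logged fields.
hours = results["train_runtime"] / 3600            # wall-clock runtime in hours
loss = results["train_loss"]                       # mean training loss
throughput = results["train_samples_per_second"]   # samples processed per second

print(f"train_loss={loss:.4f}, runtime={hours:.2f} h, {throughput:.2f} samples/s")
```

With the values recorded here, this works out to roughly 4.9 hours of training over 61,135 samples at about 3.5 samples per second.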