Meta-Llama-3-8B-Base-SimPO2-6e-7 / train_results.json
tengxiao1 · commit d5e784a · 234 Bytes
{
"epoch": 0.998691442030882,
"total_flos": 0.0,
"train_loss": -0.09237800735347676,
"train_runtime": 17432.5225,
"train_samples": 61135,
"train_samples_per_second": 3.507,
"train_steps_per_second": 0.027
}
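
A minimal sketch of how these trainer metrics might be read and sanity-checked downstream, assuming the file is saved locally as `train_results.json` (the path and the derived quantities are illustrative, not part of the original run):

```python
import json

# Load the summary that the Hugging Face Trainer writes at the end of training.
# Path is an assumption for illustration; point it at the actual file location.
with open("train_results.json") as f:
    results = json.load(f)

# Cross-check the reported throughput against the runtime.
approx_steps = results["train_runtime"] * results["train_steps_per_second"]
approx_samples = results["train_runtime"] * results["train_samples_per_second"]

print(f"epoch:                   {results['epoch']:.4f}")
print(f"train_loss:              {results['train_loss']:.5f}")
print(f"approx. optimizer steps: {approx_steps:.0f}")
print(f"approx. samples seen:    {approx_samples:.0f} of {results['train_samples']}")
```

Multiplying `train_runtime` by `train_steps_per_second` gives roughly 471 optimizer steps, and multiplying it by `train_samples_per_second` lands close to the 61,135 training samples, which is a quick consistency check on the logged throughput.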