File size: 215 Bytes
ddbf722
{
"epoch": 1.9310344827586206,
"total_flos": 923429584896.0,
"train_loss": 1.1873634883335658,
"train_runtime": 3416.1853,
"train_samples_per_second": 0.135,
"train_steps_per_second": 0.004
}
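
These fields look like the training summary that Hugging Face's Trainer writes out (for example via trainer.save_metrics), commonly saved as train_results.json. As a minimal sketch, assuming that file name and location, the metrics can be loaded and inspected like this:

import json

# Load the training summary (the file name "train_results.json" is an assumption;
# adjust the path to match the actual file in this repo).
with open("train_results.json") as f:
    metrics = json.load(f)

# Approximate the number of optimizer steps from the reported runtime and steps/second.
approx_steps = metrics["train_runtime"] * metrics["train_steps_per_second"]

print(f"epoch={metrics['epoch']:.2f}  "
      f"train_loss={metrics['train_loss']:.4f}  "
      f"~steps={approx_steps:.0f}")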