{
    "epoch": 2.4,
    "total_flos": 2.759631525568512e+16,
    "train_loss": 2.123395072089301,
    "train_runtime": 273.545,
    "train_samples_per_second": 5.034,
    "train_steps_per_second": 0.033
}
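
These keys look like the training summary that the Hugging Face transformers Trainer writes after trainer.train() (commonly saved via trainer.save_metrics("train", ...) as train_results.json in the output directory). A minimal sketch for loading and sanity-checking the numbers follows; the filename is an assumption, not given above.

import json

# Filename is an assumption; the page above does not name the file.
with open("train_results.json") as f:
    metrics = json.load(f)

# Rough consistency check: samples/sec * runtime ~ total samples processed,
# and steps/sec * runtime ~ total optimizer steps.
approx_samples = metrics["train_samples_per_second"] * metrics["train_runtime"]
approx_steps = metrics["train_steps_per_second"] * metrics["train_runtime"]

print(f"epoch={metrics['epoch']}, train_loss={metrics['train_loss']:.3f}")
print(f"~{approx_samples:.0f} samples, ~{approx_steps:.0f} steps in {metrics['train_runtime']:.1f}s")

For these values, that works out to roughly 1,377 samples and about 9 optimizer steps over the 273.5-second run.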