{
    "epoch": 4.0,
    "eval_loss": 2.4195876121520996,
    "eval_runtime": 85.2391,
    "eval_samples_per_second": 85.63,
    "eval_steps_per_second": 0.68,
    "perplexity": 11.241222615286187,
    "total_flos": 429332983971840.0,
    "train_loss": 2.284057284167586,
    "train_runtime": 15107.0718,
    "train_samples_per_second": 17.393,
    "train_steps_per_second": 0.272
}