File size: 446 Bytes
Commit: 0f52316
{
  "base_model_name": "TheBloke_Llama-2-13B-fp16",
  "base_model_class": "LlamaForCausalLM",
  "base_loaded_in_4bit": false,
  "base_loaded_in_8bit": true,
  "projections": "q, v",
  "loss": 2.5449,
  "learning_rate": 0.0001125,
  "epoch": 0.99,
  "current_steps": 383,
  "train_runtime": 604.1501,
  "train_samples_per_second": 2.569,
  "train_steps_per_second": 0.02,
  "total_flos": 3.081650980257792e+16,
  "train_loss": 2.6347402334213257
}
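For reference, a minimal sketch of reading these fields back in Python. The filename `training_log.json` and its location are assumptions; adjust the path to wherever this file is saved in your checkout.

```python
import json

# Load the exported training log (path is an assumption; point it at this file).
with open("training_log.json", "r", encoding="utf-8") as f:
    log = json.load(f)

# Report the headline numbers recorded during the LoRA run.
print(f"Base model:    {log['base_model_name']} ({log['base_model_class']})")
print(f"Quantization:  4-bit={log['base_loaded_in_4bit']}, 8-bit={log['base_loaded_in_8bit']}")
print(f"LoRA targets:  {log['projections']}")
print(f"Final loss:    {log['loss']:.4f} (mean train loss {log['train_loss']:.4f})")
print(f"Progress:      {log['current_steps']} steps, epoch {log['epoch']}")
print(f"Runtime:       {log['train_runtime']:.1f} s "
      f"({log['train_samples_per_second']:.3f} samples/s)")
```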