jgrivolla committed
Commit ba37139
1 Parent(s): eb225af

Upload 12 files

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
config.json ADDED
@@ -0,0 +1,29 @@
+{
+  "_name_or_path": "/gpfs/projects/bsc88/hf-models/fourth_epoch_bsc_7b_restart_mix1_all_fineweb_from_mix1_lr3e-5_lr3e-6_step68625_hf/",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 11008,
+  "max_position_embeddings": 8192,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 8,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.44.0",
+  "use_cache": true,
+  "vocab_size": 256000
+}
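
Taken together, this config describes a ~7B-parameter Llama-architecture causal LM: 32 layers, hidden size 4096, grouped-query attention (32 query heads, 8 KV heads), an 8192-token context, and a 256k vocabulary. A minimal loading sketch, assuming the uploaded files are served as a Hub repo or local directory; the `jgrivolla/model` id below is a hypothetical placeholder, not the real repo name:

```python
# Minimal sketch: load the checkpoint described by config.json.
# "jgrivolla/model" is a hypothetical placeholder repo id.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "jgrivolla/model"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" above
    device_map="auto",
)
```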
generation_config.json ADDED
@@ -0,0 +1,10 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "do_sample": true,
+  "eos_token_id": 2,
+  "repetition_penalty": 1.2,
+  "temperature": 0.1,
+  "top_p": 0.95,
+  "transformers_version": "4.44.0"
+}
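
These defaults are read by `model.generate()` whenever a `generation_config.json` sits next to the weights, so out of the box the model samples with temperature 0.1, top-p 0.95, and repetition penalty 1.2. A sketch, continuing from the loading example above; any argument passed to `generate()` overrides the stored default for that call:

```python
# Sketch: generation_config.json supplies the defaults; per-call kwargs win.
inputs = tokenizer("Git LFS is", return_tensors="pt").to(model.device)
out = model.generate(
    **inputs,
    max_new_tokens=64,
    temperature=0.7,  # overrides the stored 0.1 for this call only
)
print(tokenizer.decode(out[0], skip_special_tokens=True))
```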
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:860dd360668f18c13477f09d43acbda55e7bb5002413220513e00330d83a6a1f
+size 4982973048
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c2d964f4af352865bd444f0a1e451a4bf850fe83384cea68e9dfbfb6fb306368
+size 4995660232
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:15df9035377f7b5e76e1d78dba144e9b45c278df10bfdfab410de389c2ac25dd
+size 3460482936
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0834bb88afe1284919da841c832cc589b36f2bd264cab187ecc97de7d4270983
+size 2097152128
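
Each of the four shards is stored in Git as an LFS pointer: a spec version, the payload's SHA-256, and its size in bytes (together the payloads hold roughly 15.5 GB of tensor data, matching the index below). After fetching the real payloads, a download can be checked against its pointer, for example:

```python
# Sketch: verify a downloaded shard against its LFS pointer metadata.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk_size):
            h.update(block)
    return h.hexdigest()

# Digest copied from the model-00004-of-00004 pointer above.
expected = "0834bb88afe1284919da841c832cc589b36f2bd264cab187ecc97de7d4270983"
assert sha256_of("model-00004-of-00004.safetensors") == expected
```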
model.safetensors.index.json ADDED
@@ -0,0 +1,298 @@
+{
+  "metadata": {
+    "total_size": 15536234496
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00004-of-00004.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.30.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.input_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+    "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+    "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+    "model.norm.weight": "model-00003-of-00004.safetensors"
+  }
+}
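
The `weight_map` is what lets `from_pretrained` open only the shards it needs; it can also be followed by hand to pull a single tensor. A sketch using the `safetensors` library, assuming the shards have been downloaded into the working directory:

```python
# Sketch: resolve one tensor through the index and read it from its shard.
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.embed_tokens.weight"
shard = index["weight_map"][name]  # "model-00001-of-00004.safetensors"
with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)
print(tensor.shape)  # expect [256000, 4096] given config.json
```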
special_tokens_map.json ADDED
@@ -0,0 +1,34 @@
+{
+  "additional_special_tokens": [
+    {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    }
+  ],
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": "<|im_end|>",
+  "pad_token": "<unk>",
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
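
Note the remapping relative to the base Llama tokens: `eos_token` becomes `<|im_end|>` rather than `</s>`, so generation stops at the end of an assistant turn, and `<unk>` doubles as the padding token. A quick sanity check (same hypothetical repo id as above):

```python
# Sketch: confirm the remapped special tokens once the tokenizer is loaded.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("jgrivolla/model")  # hypothetical id
assert tok.eos_token == "<|im_end|>"
assert tok.pad_token == "<unk>"
# <|im_start|>/<|im_end|> sit at ids 4 and 5 per tokenizer_config.json below.
print(tok.convert_tokens_to_ids(["<|im_start|>", "<|im_end|>"]))
```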
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f055d86bd1a344221f15dcc85aaa96b6b80a616445504ef8fb5b74476e8140b4
+size 19092375
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fa490e57cebce5cb1a0a5b1a5d3fa4de05aee53dc3a44791f1c3401db44d802d
+size 4813274
tokenizer_config.json ADDED
@@ -0,0 +1,1104 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_bos_token": true,
3
+ "add_eos_token": false,
4
+ "add_prefix_space": false,
5
+ "added_tokens_decoder": {
6
+ "0": {
7
+ "content": "<unk>",
8
+ "lstrip": false,
9
+ "normalized": false,
10
+ "rstrip": false,
11
+ "single_word": false,
12
+ "special": true
13
+ },
14
+ "1": {
15
+ "content": "<s>",
16
+ "lstrip": false,
17
+ "normalized": false,
18
+ "rstrip": false,
19
+ "single_word": false,
20
+ "special": true
21
+ },
22
+ "2": {
23
+ "content": "</s>",
24
+ "lstrip": false,
25
+ "normalized": false,
26
+ "rstrip": false,
27
+ "single_word": false,
28
+ "special": true
29
+ },
30
+ "3": {
31
+ "content": "<pad>",
32
+ "lstrip": false,
33
+ "normalized": false,
34
+ "rstrip": false,
35
+ "single_word": false,
36
+ "special": true
37
+ },
38
+ "4": {
39
+ "content": "<|im_start|>",
40
+ "lstrip": false,
41
+ "normalized": false,
42
+ "rstrip": false,
43
+ "single_word": false,
44
+ "special": true
45
+ },
46
+ "5": {
47
+ "content": "<|im_end|>",
48
+ "lstrip": false,
49
+ "normalized": false,
50
+ "rstrip": false,
51
+ "single_word": false,
52
+ "special": true
53
+ },
54
+ "6": {
55
+ "content": "<|reserved_token_1|>",
56
+ "lstrip": false,
57
+ "normalized": false,
58
+ "rstrip": false,
59
+ "single_word": false,
60
+ "special": true
61
+ },
62
+ "7": {
63
+ "content": "<|reserved_token_2|>",
64
+ "lstrip": false,
65
+ "normalized": false,
66
+ "rstrip": false,
67
+ "single_word": false,
68
+ "special": true
69
+ },
70
+ "8": {
71
+ "content": "<|reserved_token_3|>",
72
+ "lstrip": false,
73
+ "normalized": false,
74
+ "rstrip": false,
75
+ "single_word": false,
76
+ "special": true
77
+ },
78
+ "9": {
79
+ "content": "<|reserved_token_4|>",
80
+ "lstrip": false,
81
+ "normalized": false,
82
+ "rstrip": false,
83
+ "single_word": false,
84
+ "special": true
85
+ },
86
+ "10": {
87
+ "content": "<|reserved_token_5|>",
88
+ "lstrip": false,
89
+ "normalized": false,
90
+ "rstrip": false,
91
+ "single_word": false,
92
+ "special": true
93
+ },
94
+ "11": {
95
+ "content": "<|reserved_token_6|>",
96
+ "lstrip": false,
97
+ "normalized": false,
98
+ "rstrip": false,
99
+ "single_word": false,
100
+ "special": true
101
+ },
102
+ "12": {
103
+ "content": "<|reserved_token_7|>",
104
+ "lstrip": false,
105
+ "normalized": false,
106
+ "rstrip": false,
107
+ "single_word": false,
108
+ "special": true
109
+ },
110
+ "13": {
111
+ "content": "<|reserved_token_8|>",
112
+ "lstrip": false,
113
+ "normalized": false,
114
+ "rstrip": false,
115
+ "single_word": false,
116
+ "special": true
117
+ },
118
+ "14": {
119
+ "content": "<|reserved_token_9|>",
120
+ "lstrip": false,
121
+ "normalized": false,
122
+ "rstrip": false,
123
+ "single_word": false,
124
+ "special": true
125
+ },
126
+ "15": {
127
+ "content": "<|reserved_token_10|>",
128
+ "lstrip": false,
129
+ "normalized": false,
130
+ "rstrip": false,
131
+ "single_word": false,
132
+ "special": true
133
+ },
134
+ "16": {
135
+ "content": "<|reserved_token_11|>",
136
+ "lstrip": false,
137
+ "normalized": false,
138
+ "rstrip": false,
139
+ "single_word": false,
140
+ "special": true
141
+ },
142
+ "17": {
143
+ "content": "<|reserved_token_12|>",
144
+ "lstrip": false,
145
+ "normalized": false,
146
+ "rstrip": false,
147
+ "single_word": false,
148
+ "special": true
149
+ },
150
+ "18": {
151
+ "content": "<|reserved_token_13|>",
152
+ "lstrip": false,
153
+ "normalized": false,
154
+ "rstrip": false,
155
+ "single_word": false,
156
+ "special": true
157
+ },
158
+ "19": {
159
+ "content": "<|reserved_token_14|>",
160
+ "lstrip": false,
161
+ "normalized": false,
162
+ "rstrip": false,
163
+ "single_word": false,
164
+ "special": true
165
+ },
166
+ "20": {
167
+ "content": "<|reserved_token_15|>",
168
+ "lstrip": false,
169
+ "normalized": false,
170
+ "rstrip": false,
171
+ "single_word": false,
172
+ "special": true
173
+ },
174
+ "21": {
175
+ "content": "<|reserved_token_16|>",
176
+ "lstrip": false,
177
+ "normalized": false,
178
+ "rstrip": false,
179
+ "single_word": false,
180
+ "special": true
181
+ },
182
+ "22": {
183
+ "content": "<|reserved_token_17|>",
184
+ "lstrip": false,
185
+ "normalized": false,
186
+ "rstrip": false,
187
+ "single_word": false,
188
+ "special": true
189
+ },
190
+ "23": {
191
+ "content": "<|reserved_token_18|>",
192
+ "lstrip": false,
193
+ "normalized": false,
194
+ "rstrip": false,
195
+ "single_word": false,
196
+ "special": true
197
+ },
198
+ "24": {
199
+ "content": "<|reserved_token_19|>",
200
+ "lstrip": false,
201
+ "normalized": false,
202
+ "rstrip": false,
203
+ "single_word": false,
204
+ "special": true
205
+ },
206
+ "25": {
207
+ "content": "<|reserved_token_20|>",
208
+ "lstrip": false,
209
+ "normalized": false,
210
+ "rstrip": false,
211
+ "single_word": false,
212
+ "special": true
213
+ },
214
+ "26": {
215
+ "content": "<|reserved_token_21|>",
216
+ "lstrip": false,
217
+ "normalized": false,
218
+ "rstrip": false,
219
+ "single_word": false,
220
+ "special": true
221
+ },
222
+ "27": {
223
+ "content": "<|reserved_token_22|>",
224
+ "lstrip": false,
225
+ "normalized": false,
226
+ "rstrip": false,
227
+ "single_word": false,
228
+ "special": true
229
+ },
230
+ "28": {
231
+ "content": "<|reserved_token_23|>",
232
+ "lstrip": false,
233
+ "normalized": false,
234
+ "rstrip": false,
235
+ "single_word": false,
236
+ "special": true
237
+ },
238
+ "29": {
239
+ "content": "<|reserved_token_24|>",
240
+ "lstrip": false,
241
+ "normalized": false,
242
+ "rstrip": false,
243
+ "single_word": false,
244
+ "special": true
245
+ },
246
+ "30": {
247
+ "content": "<|reserved_token_25|>",
248
+ "lstrip": false,
249
+ "normalized": false,
250
+ "rstrip": false,
251
+ "single_word": false,
252
+ "special": true
253
+ },
254
+ "31": {
255
+ "content": "<|reserved_token_26|>",
256
+ "lstrip": false,
257
+ "normalized": false,
258
+ "rstrip": false,
259
+ "single_word": false,
260
+ "special": true
261
+ },
262
+ "32": {
263
+ "content": "<|reserved_token_27|>",
264
+ "lstrip": false,
265
+ "normalized": false,
266
+ "rstrip": false,
267
+ "single_word": false,
268
+ "special": true
269
+ },
270
+ "33": {
271
+ "content": "<|reserved_token_28|>",
272
+ "lstrip": false,
273
+ "normalized": false,
274
+ "rstrip": false,
275
+ "single_word": false,
276
+ "special": true
277
+ },
278
+ "34": {
279
+ "content": "<|reserved_token_29|>",
280
+ "lstrip": false,
281
+ "normalized": false,
282
+ "rstrip": false,
283
+ "single_word": false,
284
+ "special": true
285
+ },
286
+ "35": {
287
+ "content": "<|reserved_token_30|>",
288
+ "lstrip": false,
289
+ "normalized": false,
290
+ "rstrip": false,
291
+ "single_word": false,
292
+ "special": true
293
+ },
294
+ "36": {
295
+ "content": "<|reserved_token_31|>",
296
+ "lstrip": false,
297
+ "normalized": false,
298
+ "rstrip": false,
299
+ "single_word": false,
300
+ "special": true
301
+ },
302
+ "37": {
303
+ "content": "<|reserved_token_32|>",
304
+ "lstrip": false,
305
+ "normalized": false,
306
+ "rstrip": false,
307
+ "single_word": false,
308
+ "special": true
309
+ },
310
+ "38": {
311
+ "content": "<|reserved_token_33|>",
312
+ "lstrip": false,
313
+ "normalized": false,
314
+ "rstrip": false,
315
+ "single_word": false,
316
+ "special": true
317
+ },
318
+ "39": {
319
+ "content": "<|reserved_token_34|>",
320
+ "lstrip": false,
321
+ "normalized": false,
322
+ "rstrip": false,
323
+ "single_word": false,
324
+ "special": true
325
+ },
326
+ "40": {
327
+ "content": "<|reserved_token_35|>",
328
+ "lstrip": false,
329
+ "normalized": false,
330
+ "rstrip": false,
331
+ "single_word": false,
332
+ "special": true
333
+ },
334
+ "41": {
335
+ "content": "<|reserved_token_36|>",
336
+ "lstrip": false,
337
+ "normalized": false,
338
+ "rstrip": false,
339
+ "single_word": false,
340
+ "special": true
341
+ },
342
+ "42": {
343
+ "content": "<|reserved_token_37|>",
344
+ "lstrip": false,
345
+ "normalized": false,
346
+ "rstrip": false,
347
+ "single_word": false,
348
+ "special": true
349
+ },
350
+ "43": {
351
+ "content": "<|reserved_token_38|>",
352
+ "lstrip": false,
353
+ "normalized": false,
354
+ "rstrip": false,
355
+ "single_word": false,
356
+ "special": true
357
+ },
358
+ "44": {
359
+ "content": "<|reserved_token_39|>",
360
+ "lstrip": false,
361
+ "normalized": false,
362
+ "rstrip": false,
363
+ "single_word": false,
364
+ "special": true
365
+ },
366
+ "45": {
367
+ "content": "<|reserved_token_40|>",
368
+ "lstrip": false,
369
+ "normalized": false,
370
+ "rstrip": false,
371
+ "single_word": false,
372
+ "special": true
373
+ },
374
+ "46": {
375
+ "content": "<|reserved_token_41|>",
376
+ "lstrip": false,
377
+ "normalized": false,
378
+ "rstrip": false,
379
+ "single_word": false,
380
+ "special": true
381
+ },
382
+ "47": {
383
+ "content": "<|reserved_token_42|>",
384
+ "lstrip": false,
385
+ "normalized": false,
386
+ "rstrip": false,
387
+ "single_word": false,
388
+ "special": true
389
+ },
390
+ "48": {
391
+ "content": "<|reserved_token_43|>",
392
+ "lstrip": false,
393
+ "normalized": false,
394
+ "rstrip": false,
395
+ "single_word": false,
396
+ "special": true
397
+ },
398
+ "49": {
399
+ "content": "<|reserved_token_44|>",
400
+ "lstrip": false,
401
+ "normalized": false,
402
+ "rstrip": false,
403
+ "single_word": false,
404
+ "special": true
405
+ },
406
+ "50": {
407
+ "content": "<|reserved_token_45|>",
408
+ "lstrip": false,
409
+ "normalized": false,
410
+ "rstrip": false,
411
+ "single_word": false,
412
+ "special": true
413
+ },
414
+ "51": {
415
+ "content": "<|reserved_token_46|>",
416
+ "lstrip": false,
417
+ "normalized": false,
418
+ "rstrip": false,
419
+ "single_word": false,
420
+ "special": true
421
+ },
422
+ "52": {
423
+ "content": "<|reserved_token_47|>",
424
+ "lstrip": false,
425
+ "normalized": false,
426
+ "rstrip": false,
427
+ "single_word": false,
428
+ "special": true
429
+ },
430
+ "53": {
431
+ "content": "<|reserved_token_48|>",
432
+ "lstrip": false,
433
+ "normalized": false,
434
+ "rstrip": false,
435
+ "single_word": false,
436
+ "special": true
437
+ },
438
+ "54": {
439
+ "content": "<|reserved_token_49|>",
440
+ "lstrip": false,
441
+ "normalized": false,
442
+ "rstrip": false,
443
+ "single_word": false,
444
+ "special": true
445
+ },
446
+ "55": {
447
+ "content": "<|reserved_token_50|>",
448
+ "lstrip": false,
449
+ "normalized": false,
450
+ "rstrip": false,
451
+ "single_word": false,
452
+ "special": true
453
+ },
454
+ "56": {
455
+ "content": "<|reserved_token_51|>",
456
+ "lstrip": false,
457
+ "normalized": false,
458
+ "rstrip": false,
459
+ "single_word": false,
460
+ "special": true
461
+ },
462
+ "57": {
463
+ "content": "<|reserved_token_52|>",
464
+ "lstrip": false,
465
+ "normalized": false,
466
+ "rstrip": false,
467
+ "single_word": false,
468
+ "special": true
469
+ },
470
+ "58": {
471
+ "content": "<|reserved_token_53|>",
472
+ "lstrip": false,
473
+ "normalized": false,
474
+ "rstrip": false,
475
+ "single_word": false,
476
+ "special": true
477
+ },
478
+ "59": {
479
+ "content": "<|reserved_token_54|>",
480
+ "lstrip": false,
481
+ "normalized": false,
482
+ "rstrip": false,
483
+ "single_word": false,
484
+ "special": true
485
+ },
486
+ "60": {
487
+ "content": "<|reserved_token_55|>",
488
+ "lstrip": false,
489
+ "normalized": false,
490
+ "rstrip": false,
491
+ "single_word": false,
492
+ "special": true
493
+ },
494
+ "61": {
495
+ "content": "<|reserved_token_56|>",
496
+ "lstrip": false,
497
+ "normalized": false,
498
+ "rstrip": false,
499
+ "single_word": false,
500
+ "special": true
501
+ },
502
+ "62": {
503
+ "content": "<|reserved_token_57|>",
504
+ "lstrip": false,
505
+ "normalized": false,
506
+ "rstrip": false,
507
+ "single_word": false,
508
+ "special": true
509
+ },
510
+ "63": {
511
+ "content": "<|reserved_token_58|>",
512
+ "lstrip": false,
513
+ "normalized": false,
514
+ "rstrip": false,
515
+ "single_word": false,
516
+ "special": true
517
+ },
518
+ "64": {
519
+ "content": "<|reserved_token_59|>",
520
+ "lstrip": false,
521
+ "normalized": false,
522
+ "rstrip": false,
523
+ "single_word": false,
524
+ "special": true
525
+ },
526
+ "65": {
527
+ "content": "<|reserved_token_60|>",
528
+ "lstrip": false,
529
+ "normalized": false,
530
+ "rstrip": false,
531
+ "single_word": false,
532
+ "special": true
533
+ },
534
+ "66": {
535
+ "content": "<|reserved_token_61|>",
536
+ "lstrip": false,
537
+ "normalized": false,
538
+ "rstrip": false,
539
+ "single_word": false,
540
+ "special": true
541
+ },
542
+ "67": {
543
+ "content": "<|reserved_token_62|>",
544
+ "lstrip": false,
545
+ "normalized": false,
546
+ "rstrip": false,
547
+ "single_word": false,
548
+ "special": true
549
+ },
550
+ "68": {
551
+ "content": "<|reserved_token_63|>",
552
+ "lstrip": false,
553
+ "normalized": false,
554
+ "rstrip": false,
555
+ "single_word": false,
556
+ "special": true
557
+ },
558
+ "69": {
559
+ "content": "<|reserved_token_64|>",
560
+ "lstrip": false,
561
+ "normalized": false,
562
+ "rstrip": false,
563
+ "single_word": false,
564
+ "special": true
565
+ },
566
+ "70": {
567
+ "content": "<|reserved_token_65|>",
568
+ "lstrip": false,
569
+ "normalized": false,
570
+ "rstrip": false,
571
+ "single_word": false,
572
+ "special": true
573
+ },
574
+ "71": {
575
+ "content": "<|reserved_token_66|>",
576
+ "lstrip": false,
577
+ "normalized": false,
578
+ "rstrip": false,
579
+ "single_word": false,
580
+ "special": true
581
+ },
582
+ "72": {
583
+ "content": "<|reserved_token_67|>",
584
+ "lstrip": false,
585
+ "normalized": false,
586
+ "rstrip": false,
587
+ "single_word": false,
588
+ "special": true
589
+ },
590
+ "73": {
591
+ "content": "<|reserved_token_68|>",
592
+ "lstrip": false,
593
+ "normalized": false,
594
+ "rstrip": false,
595
+ "single_word": false,
596
+ "special": true
597
+ },
598
+ "74": {
599
+ "content": "<|reserved_token_69|>",
600
+ "lstrip": false,
601
+ "normalized": false,
602
+ "rstrip": false,
603
+ "single_word": false,
604
+ "special": true
605
+ },
606
+ "75": {
607
+ "content": "<|reserved_token_70|>",
608
+ "lstrip": false,
609
+ "normalized": false,
610
+ "rstrip": false,
611
+ "single_word": false,
612
+ "special": true
613
+ },
614
+ "76": {
615
+ "content": "<|reserved_token_71|>",
616
+ "lstrip": false,
617
+ "normalized": false,
618
+ "rstrip": false,
619
+ "single_word": false,
620
+ "special": true
621
+ },
622
+ "77": {
623
+ "content": "<|reserved_token_72|>",
624
+ "lstrip": false,
625
+ "normalized": false,
626
+ "rstrip": false,
627
+ "single_word": false,
628
+ "special": true
629
+ },
630
+ "78": {
631
+ "content": "<|reserved_token_73|>",
632
+ "lstrip": false,
633
+ "normalized": false,
634
+ "rstrip": false,
635
+ "single_word": false,
636
+ "special": true
637
+ },
638
+ "79": {
639
+ "content": "<|reserved_token_74|>",
640
+ "lstrip": false,
641
+ "normalized": false,
642
+ "rstrip": false,
643
+ "single_word": false,
644
+ "special": true
645
+ },
646
+ "80": {
647
+ "content": "<|reserved_token_75|>",
648
+ "lstrip": false,
649
+ "normalized": false,
650
+ "rstrip": false,
651
+ "single_word": false,
652
+ "special": true
653
+ },
654
+ "81": {
655
+ "content": "<|reserved_token_76|>",
656
+ "lstrip": false,
657
+ "normalized": false,
658
+ "rstrip": false,
659
+ "single_word": false,
660
+ "special": true
661
+ },
662
+ "82": {
663
+ "content": "<|reserved_token_77|>",
664
+ "lstrip": false,
665
+ "normalized": false,
666
+ "rstrip": false,
667
+ "single_word": false,
668
+ "special": true
669
+ },
670
+ "83": {
671
+ "content": "<|reserved_token_78|>",
672
+ "lstrip": false,
673
+ "normalized": false,
674
+ "rstrip": false,
675
+ "single_word": false,
676
+ "special": true
677
+ },
678
+ "84": {
679
+ "content": "<|reserved_token_79|>",
680
+ "lstrip": false,
681
+ "normalized": false,
682
+ "rstrip": false,
683
+ "single_word": false,
684
+ "special": true
685
+ },
686
+ "85": {
687
+ "content": "<|reserved_token_80|>",
688
+ "lstrip": false,
689
+ "normalized": false,
690
+ "rstrip": false,
691
+ "single_word": false,
692
+ "special": true
693
+ },
694
+ "86": {
695
+ "content": "<|reserved_token_81|>",
696
+ "lstrip": false,
697
+ "normalized": false,
698
+ "rstrip": false,
699
+ "single_word": false,
700
+ "special": true
701
+ },
702
+ "87": {
703
+ "content": "<|reserved_token_82|>",
704
+ "lstrip": false,
705
+ "normalized": false,
706
+ "rstrip": false,
707
+ "single_word": false,
708
+ "special": true
709
+ },
710
+ "88": {
711
+ "content": "<|reserved_token_83|>",
712
+ "lstrip": false,
713
+ "normalized": false,
714
+ "rstrip": false,
715
+ "single_word": false,
716
+ "special": true
717
+ },
718
+ "89": {
719
+ "content": "<|reserved_token_84|>",
720
+ "lstrip": false,
721
+ "normalized": false,
722
+ "rstrip": false,
723
+ "single_word": false,
724
+ "special": true
725
+ },
726
+ "90": {
727
+ "content": "<|reserved_token_85|>",
728
+ "lstrip": false,
729
+ "normalized": false,
730
+ "rstrip": false,
731
+ "single_word": false,
732
+ "special": true
733
+ },
734
+ "91": {
735
+ "content": "<|reserved_token_86|>",
736
+ "lstrip": false,
737
+ "normalized": false,
738
+ "rstrip": false,
739
+ "single_word": false,
740
+ "special": true
741
+ },
742
+ "92": {
743
+ "content": "<|reserved_token_87|>",
744
+ "lstrip": false,
745
+ "normalized": false,
746
+ "rstrip": false,
747
+ "single_word": false,
748
+ "special": true
749
+ },
750
+ "93": {
751
+ "content": "<|reserved_token_88|>",
752
+ "lstrip": false,
753
+ "normalized": false,
754
+ "rstrip": false,
755
+ "single_word": false,
756
+ "special": true
757
+ },
758
+ "94": {
759
+ "content": "<|reserved_token_89|>",
760
+ "lstrip": false,
761
+ "normalized": false,
762
+ "rstrip": false,
763
+ "single_word": false,
764
+ "special": true
765
+ },
766
+ "95": {
767
+ "content": "<|reserved_token_90|>",
768
+ "lstrip": false,
769
+ "normalized": false,
770
+ "rstrip": false,
771
+ "single_word": false,
772
+ "special": true
773
+ },
774
+ "96": {
775
+ "content": "<|reserved_token_91|>",
776
+ "lstrip": false,
777
+ "normalized": false,
778
+ "rstrip": false,
779
+ "single_word": false,
780
+ "special": true
781
+ },
782
+ "97": {
783
+ "content": "<|reserved_token_92|>",
784
+ "lstrip": false,
785
+ "normalized": false,
786
+ "rstrip": false,
787
+ "single_word": false,
788
+ "special": true
789
+ },
790
+ "98": {
791
+ "content": "<|reserved_token_93|>",
792
+ "lstrip": false,
793
+ "normalized": false,
794
+ "rstrip": false,
795
+ "single_word": false,
796
+ "special": true
797
+ },
798
+ "99": {
799
+ "content": "<|reserved_token_94|>",
800
+ "lstrip": false,
801
+ "normalized": false,
802
+ "rstrip": false,
803
+ "single_word": false,
804
+ "special": true
805
+ },
806
+ "100": {
807
+ "content": "<|reserved_token_95|>",
808
+ "lstrip": false,
809
+ "normalized": false,
810
+ "rstrip": false,
811
+ "single_word": false,
812
+ "special": true
813
+ },
814
+ "101": {
815
+ "content": "<|reserved_token_96|>",
816
+ "lstrip": false,
817
+ "normalized": false,
818
+ "rstrip": false,
819
+ "single_word": false,
820
+ "special": true
821
+ },
822
+ "102": {
823
+ "content": "<|reserved_token_97|>",
824
+ "lstrip": false,
825
+ "normalized": false,
826
+ "rstrip": false,
827
+ "single_word": false,
828
+ "special": true
829
+ },
830
+ "103": {
831
+ "content": "<|reserved_token_98|>",
832
+ "lstrip": false,
833
+ "normalized": false,
834
+ "rstrip": false,
835
+ "single_word": false,
836
+ "special": true
837
+ },
838
+ "104": {
839
+ "content": "\\r",
840
+ "lstrip": false,
841
+ "normalized": false,
842
+ "rstrip": false,
843
+ "single_word": false,
844
+ "special": false
845
+ },
846
+ "105": {
847
+ "content": "▁▁",
848
+ "lstrip": false,
849
+ "normalized": false,
850
+ "rstrip": false,
851
+ "single_word": false,
852
+ "special": false
853
+ },
854
+ "106": {
855
+ "content": "▁▁▁",
856
+ "lstrip": false,
857
+ "normalized": false,
858
+ "rstrip": false,
859
+ "single_word": false,
860
+ "special": false
861
+ },
862
+ "107": {
863
+ "content": "▁▁▁▁",
864
+ "lstrip": false,
865
+ "normalized": false,
866
+ "rstrip": false,
867
+ "single_word": false,
868
+ "special": false
869
+ },
870
+ "108": {
871
+ "content": "▁▁▁▁▁",
872
+ "lstrip": false,
873
+ "normalized": false,
874
+ "rstrip": false,
875
+ "single_word": false,
876
+ "special": false
877
+ },
878
+ "109": {
879
+ "content": "▁▁▁▁▁▁",
880
+ "lstrip": false,
881
+ "normalized": false,
882
+ "rstrip": false,
883
+ "single_word": false,
884
+ "special": false
885
+ },
886
+ "110": {
887
+ "content": "▁▁▁▁▁▁▁",
888
+ "lstrip": false,
889
+ "normalized": false,
890
+ "rstrip": false,
891
+ "single_word": false,
892
+ "special": false
893
+ },
894
+ "111": {
895
+ "content": "▁▁▁▁▁▁▁▁",
896
+ "lstrip": false,
897
+ "normalized": false,
898
+ "rstrip": false,
899
+ "single_word": false,
900
+ "special": false
901
+ },
902
+ "112": {
903
+ "content": "▁▁▁▁▁▁▁▁▁",
904
+ "lstrip": false,
905
+ "normalized": false,
906
+ "rstrip": false,
907
+ "single_word": false,
908
+ "special": false
909
+ },
910
+ "113": {
911
+ "content": "▁▁▁▁▁▁▁▁▁▁",
912
+ "lstrip": false,
913
+ "normalized": false,
914
+ "rstrip": false,
915
+ "single_word": false,
916
+ "special": false
917
+ },
918
+ "114": {
919
+ "content": "▁▁▁▁▁▁▁▁▁▁▁",
920
+ "lstrip": false,
921
+ "normalized": false,
922
+ "rstrip": false,
923
+ "single_word": false,
924
+ "special": false
925
+ },
926
+ "115": {
927
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁",
928
+ "lstrip": false,
929
+ "normalized": false,
930
+ "rstrip": false,
931
+ "single_word": false,
932
+ "special": false
933
+ },
934
+ "116": {
935
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁",
936
+ "lstrip": false,
937
+ "normalized": false,
938
+ "rstrip": false,
939
+ "single_word": false,
940
+ "special": false
941
+ },
942
+ "117": {
943
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
944
+ "lstrip": false,
945
+ "normalized": false,
946
+ "rstrip": false,
947
+ "single_word": false,
948
+ "special": false
949
+ },
950
+ "118": {
951
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
952
+ "lstrip": false,
953
+ "normalized": false,
954
+ "rstrip": false,
955
+ "single_word": false,
956
+ "special": false
957
+ },
958
+ "119": {
959
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
960
+ "lstrip": false,
961
+ "normalized": false,
962
+ "rstrip": false,
963
+ "single_word": false,
964
+ "special": false
965
+ },
966
+ "120": {
967
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
968
+ "lstrip": false,
969
+ "normalized": false,
970
+ "rstrip": false,
971
+ "single_word": false,
972
+ "special": false
973
+ },
974
+ "121": {
975
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
976
+ "lstrip": false,
977
+ "normalized": false,
978
+ "rstrip": false,
979
+ "single_word": false,
980
+ "special": false
981
+ },
982
+ "122": {
983
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
984
+ "lstrip": false,
985
+ "normalized": false,
986
+ "rstrip": false,
987
+ "single_word": false,
988
+ "special": false
989
+ },
990
+ "123": {
991
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
992
+ "lstrip": false,
993
+ "normalized": false,
994
+ "rstrip": false,
995
+ "single_word": false,
996
+ "special": false
997
+ },
998
+ "124": {
999
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1000
+ "lstrip": false,
1001
+ "normalized": false,
1002
+ "rstrip": false,
1003
+ "single_word": false,
1004
+ "special": false
1005
+ },
1006
+ "125": {
1007
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1008
+ "lstrip": false,
1009
+ "normalized": false,
1010
+ "rstrip": false,
1011
+ "single_word": false,
1012
+ "special": false
1013
+ },
1014
+ "126": {
1015
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1016
+ "lstrip": false,
1017
+ "normalized": false,
1018
+ "rstrip": false,
1019
+ "single_word": false,
1020
+ "special": false
1021
+ },
1022
+ "127": {
1023
+ "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁",
1024
+ "lstrip": false,
1025
+ "normalized": false,
1026
+ "rstrip": false,
1027
+ "single_word": false,
1028
+ "special": false
1029
+ },
1030
+ "128": {
1031
+ "content": "\t\t",
1032
+ "lstrip": false,
1033
+ "normalized": false,
1034
+ "rstrip": false,
1035
+ "single_word": false,
1036
+ "special": false
1037
+ },
1038
+ "129": {
1039
+ "content": "\t\t\t",
1040
+ "lstrip": false,
1041
+ "normalized": false,
1042
+ "rstrip": false,
1043
+ "single_word": false,
1044
+ "special": false
1045
+ },
1046
+ "130": {
1047
+ "content": "\t\t\t\t",
1048
+ "lstrip": false,
1049
+ "normalized": false,
1050
+ "rstrip": false,
1051
+ "single_word": false,
1052
+ "special": false
1053
+ },
1054
+ "131": {
1055
+ "content": "\t\t\t\t\t",
1056
+ "lstrip": false,
1057
+ "normalized": false,
1058
+ "rstrip": false,
1059
+ "single_word": false,
1060
+ "special": false
1061
+ },
1062
+ "132": {
1063
+ "content": "\t\t\t\t\t\t",
1064
+ "lstrip": false,
1065
+ "normalized": false,
1066
+ "rstrip": false,
1067
+ "single_word": false,
1068
+ "special": false
1069
+ },
1070
+ "133": {
1071
+ "content": "\n\n",
1072
+ "lstrip": false,
1073
+ "normalized": false,
1074
+ "rstrip": false,
1075
+ "single_word": false,
1076
+ "special": false
1077
+ },
1078
+ "134": {
1079
+ "content": "\n\n\n",
1080
+ "lstrip": false,
1081
+ "normalized": false,
1082
+ "rstrip": false,
1083
+ "single_word": false,
1084
+ "special": false
1085
+ }
1086
+ },
1087
+ "additional_special_tokens": [
1088
+ "<|im_end|>",
1089
+ "<|im_start|>"
1090
+ ],
1091
+ "bos_token": "<s>",
1092
+ "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] | trim + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
1093
+ "clean_up_tokenization_spaces": false,
1094
+ "eos_token": "<|im_end|>",
1095
+ "legacy": true,
1096
+ "model_max_length": 8192,
1097
+ "pad_token": "<unk>",
1098
+ "padding_side": "right",
1099
+ "sp_model_kwargs": {},
1100
+ "spaces_between_special_tokens": false,
1101
+ "tokenizer_class": "LlamaTokenizer",
1102
+ "unk_token": "<unk>",
1103
+ "use_default_system_prompt": false
1104
+ }
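
Note: the chat_template added in tokenizer_config.json above follows the ChatML convention (<|im_start|> / <|im_end|> delimiters, with an optional trailing assistant header). A minimal sketch of applying it through the standard transformers tokenizer API is below; the repository id is a placeholder, not something confirmed by this commit.

# Minimal sketch: rendering the ChatML-style chat_template from this
# tokenizer_config.json. "path/to/this-repo" is a placeholder id; substitute
# the actual repository path.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/this-repo")  # placeholder

messages = [
    {"role": "user", "content": "Hola, com estàs?"},
]

# With add_generation_prompt=True the template yields:
#   <|im_start|>user\nHola, com estàs?<|im_end|>\n<|im_start|>assistant\n
prompt = tokenizer.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,  # appends the assistant header per the template
)
print(prompt)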
trainer_state.json ADDED
@@ -0,0 +1,2376 @@
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 0.9984871406959153,
5
+ "eval_steps": 83,
6
+ "global_step": 330,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.0030257186081694403,
13
+ "grad_norm": 28.705169927796575,
14
+ "learning_rate": 2.0000000000000003e-06,
15
+ "loss": 1.1946,
16
+ "step": 1
17
+ },
18
+ {
19
+ "epoch": 0.006051437216338881,
20
+ "grad_norm": 46.50769527739581,
21
+ "learning_rate": 4.000000000000001e-06,
22
+ "loss": 1.2123,
23
+ "step": 2
24
+ },
25
+ {
26
+ "epoch": 0.009077155824508321,
27
+ "grad_norm": 25.39574491403161,
28
+ "learning_rate": 6e-06,
29
+ "loss": 1.156,
30
+ "step": 3
31
+ },
32
+ {
33
+ "epoch": 0.012102874432677761,
34
+ "grad_norm": 45.24843842377614,
35
+ "learning_rate": 8.000000000000001e-06,
36
+ "loss": 1.0308,
37
+ "step": 4
38
+ },
39
+ {
40
+ "epoch": 0.015128593040847202,
41
+ "grad_norm": 22.433510797009237,
42
+ "learning_rate": 1e-05,
43
+ "loss": 0.8752,
44
+ "step": 5
45
+ },
46
+ {
47
+ "epoch": 0.018154311649016642,
48
+ "grad_norm": 13.241172853966125,
49
+ "learning_rate": 1.2e-05,
50
+ "loss": 0.8675,
51
+ "step": 6
52
+ },
53
+ {
54
+ "epoch": 0.02118003025718608,
55
+ "grad_norm": 22.26903307955315,
56
+ "learning_rate": 1.4e-05,
57
+ "loss": 0.8845,
58
+ "step": 7
59
+ },
60
+ {
61
+ "epoch": 0.024205748865355523,
62
+ "grad_norm": 9.822357864256519,
63
+ "learning_rate": 1.6000000000000003e-05,
64
+ "loss": 0.8197,
65
+ "step": 8
66
+ },
67
+ {
68
+ "epoch": 0.02723146747352496,
69
+ "grad_norm": 8.694636509708216,
70
+ "learning_rate": 1.8e-05,
71
+ "loss": 0.8479,
72
+ "step": 9
73
+ },
74
+ {
75
+ "epoch": 0.030257186081694403,
76
+ "grad_norm": 11.16214976570948,
77
+ "learning_rate": 2e-05,
78
+ "loss": 0.8287,
79
+ "step": 10
80
+ },
81
+ {
82
+ "epoch": 0.03328290468986384,
83
+ "grad_norm": 5.911685823891796,
84
+ "learning_rate": 1.999951808959328e-05,
85
+ "loss": 0.8226,
86
+ "step": 11
87
+ },
88
+ {
89
+ "epoch": 0.036308623298033284,
90
+ "grad_norm": 21.36728206664348,
91
+ "learning_rate": 1.9998072404820648e-05,
92
+ "loss": 0.8188,
93
+ "step": 12
94
+ },
95
+ {
96
+ "epoch": 0.039334341906202726,
97
+ "grad_norm": 6.095716493647628,
98
+ "learning_rate": 1.9995663085020215e-05,
99
+ "loss": 0.8025,
100
+ "step": 13
101
+ },
102
+ {
103
+ "epoch": 0.04236006051437216,
104
+ "grad_norm": 3.99657658054729,
105
+ "learning_rate": 1.9992290362407232e-05,
106
+ "loss": 0.7603,
107
+ "step": 14
108
+ },
109
+ {
110
+ "epoch": 0.0453857791225416,
111
+ "grad_norm": 2.9561276705682618,
112
+ "learning_rate": 1.9987954562051724e-05,
113
+ "loss": 0.7753,
114
+ "step": 15
115
+ },
116
+ {
117
+ "epoch": 0.048411497730711045,
118
+ "grad_norm": 3.1432043759551167,
119
+ "learning_rate": 1.998265610184716e-05,
120
+ "loss": 0.7652,
121
+ "step": 16
122
+ },
123
+ {
124
+ "epoch": 0.05143721633888049,
125
+ "grad_norm": 3.4729320368253522,
126
+ "learning_rate": 1.997639549247016e-05,
127
+ "loss": 0.7687,
128
+ "step": 17
129
+ },
130
+ {
131
+ "epoch": 0.05446293494704992,
132
+ "grad_norm": 2.7415235461015532,
133
+ "learning_rate": 1.9969173337331283e-05,
134
+ "loss": 0.7359,
135
+ "step": 18
136
+ },
137
+ {
138
+ "epoch": 0.057488653555219364,
139
+ "grad_norm": 3.3156999565141376,
140
+ "learning_rate": 1.9960990332516875e-05,
141
+ "loss": 0.7636,
142
+ "step": 19
143
+ },
144
+ {
145
+ "epoch": 0.060514372163388806,
146
+ "grad_norm": 3.920778052156621,
147
+ "learning_rate": 1.995184726672197e-05,
148
+ "loss": 0.7772,
149
+ "step": 20
150
+ },
151
+ {
152
+ "epoch": 0.06354009077155824,
153
+ "grad_norm": 2.8732391212646093,
154
+ "learning_rate": 1.9941745021174284e-05,
155
+ "loss": 0.7555,
156
+ "step": 21
157
+ },
158
+ {
159
+ "epoch": 0.06656580937972768,
160
+ "grad_norm": 3.365576404016739,
161
+ "learning_rate": 1.9930684569549265e-05,
162
+ "loss": 0.7404,
163
+ "step": 22
164
+ },
165
+ {
166
+ "epoch": 0.06959152798789713,
167
+ "grad_norm": 2.930471826033828,
168
+ "learning_rate": 1.991866697787626e-05,
169
+ "loss": 0.7522,
170
+ "step": 23
171
+ },
172
+ {
173
+ "epoch": 0.07261724659606657,
174
+ "grad_norm": 3.1830530819926204,
175
+ "learning_rate": 1.990569340443577e-05,
176
+ "loss": 0.7345,
177
+ "step": 24
178
+ },
179
+ {
180
+ "epoch": 0.07564296520423601,
181
+ "grad_norm": 2.8871857831062453,
182
+ "learning_rate": 1.989176509964781e-05,
183
+ "loss": 0.7031,
184
+ "step": 25
185
+ },
186
+ {
187
+ "epoch": 0.07866868381240545,
188
+ "grad_norm": 2.786912583824214,
189
+ "learning_rate": 1.9876883405951378e-05,
190
+ "loss": 0.7216,
191
+ "step": 26
192
+ },
193
+ {
194
+ "epoch": 0.08169440242057488,
195
+ "grad_norm": 2.6929192382433915,
196
+ "learning_rate": 1.9861049757675087e-05,
197
+ "loss": 0.7216,
198
+ "step": 27
199
+ },
200
+ {
201
+ "epoch": 0.08472012102874432,
202
+ "grad_norm": 2.6063726645303413,
203
+ "learning_rate": 1.9844265680898917e-05,
204
+ "loss": 0.7295,
205
+ "step": 28
206
+ },
207
+ {
208
+ "epoch": 0.08774583963691376,
209
+ "grad_norm": 3.0782649912185365,
210
+ "learning_rate": 1.982653279330712e-05,
211
+ "loss": 0.7075,
212
+ "step": 29
213
+ },
214
+ {
215
+ "epoch": 0.0907715582450832,
216
+ "grad_norm": 47.39648659137784,
217
+ "learning_rate": 1.9807852804032306e-05,
218
+ "loss": 0.7247,
219
+ "step": 30
220
+ },
221
+ {
222
+ "epoch": 0.09379727685325265,
223
+ "grad_norm": 2.576758058922432,
224
+ "learning_rate": 1.9788227513490724e-05,
225
+ "loss": 0.7332,
226
+ "step": 31
227
+ },
228
+ {
229
+ "epoch": 0.09682299546142209,
230
+ "grad_norm": 2.4299981611439834,
231
+ "learning_rate": 1.9767658813208725e-05,
232
+ "loss": 0.7215,
233
+ "step": 32
234
+ },
235
+ {
236
+ "epoch": 0.09984871406959153,
237
+ "grad_norm": 2.693034366034686,
238
+ "learning_rate": 1.974614868564045e-05,
239
+ "loss": 0.6484,
240
+ "step": 33
241
+ },
242
+ {
243
+ "epoch": 0.10287443267776097,
244
+ "grad_norm": 2.802485927477001,
245
+ "learning_rate": 1.9723699203976768e-05,
246
+ "loss": 0.7217,
247
+ "step": 34
248
+ },
249
+ {
250
+ "epoch": 0.1059001512859304,
251
+ "grad_norm": 2.344053987952208,
252
+ "learning_rate": 1.9700312531945444e-05,
253
+ "loss": 0.7304,
254
+ "step": 35
255
+ },
256
+ {
257
+ "epoch": 0.10892586989409984,
258
+ "grad_norm": 2.6568421428735873,
259
+ "learning_rate": 1.96759909236026e-05,
260
+ "loss": 0.7058,
261
+ "step": 36
262
+ },
263
+ {
264
+ "epoch": 0.11195158850226929,
265
+ "grad_norm": 2.1838166261289094,
266
+ "learning_rate": 1.9650736723115476e-05,
267
+ "loss": 0.678,
268
+ "step": 37
269
+ },
270
+ {
271
+ "epoch": 0.11497730711043873,
272
+ "grad_norm": 4.459380746633169,
273
+ "learning_rate": 1.9624552364536472e-05,
274
+ "loss": 0.7375,
275
+ "step": 38
276
+ },
277
+ {
278
+ "epoch": 0.11800302571860817,
279
+ "grad_norm": 2.194060418634913,
280
+ "learning_rate": 1.9597440371568576e-05,
281
+ "loss": 0.7119,
282
+ "step": 39
283
+ },
284
+ {
285
+ "epoch": 0.12102874432677761,
286
+ "grad_norm": 2.4998393239432053,
287
+ "learning_rate": 1.956940335732209e-05,
288
+ "loss": 0.729,
289
+ "step": 40
290
+ },
291
+ {
292
+ "epoch": 0.12405446293494705,
293
+ "grad_norm": 2.315910880599856,
294
+ "learning_rate": 1.9540444024062807e-05,
295
+ "loss": 0.7038,
296
+ "step": 41
297
+ },
298
+ {
299
+ "epoch": 0.12708018154311648,
300
+ "grad_norm": 2.2599221877365006,
301
+ "learning_rate": 1.9510565162951538e-05,
302
+ "loss": 0.738,
303
+ "step": 42
304
+ },
305
+ {
306
+ "epoch": 0.13010590015128592,
307
+ "grad_norm": 2.523504960053174,
308
+ "learning_rate": 1.9479769653775107e-05,
309
+ "loss": 0.6821,
310
+ "step": 43
311
+ },
312
+ {
313
+ "epoch": 0.13313161875945537,
314
+ "grad_norm": 2.5848519687661615,
315
+ "learning_rate": 1.944806046466878e-05,
316
+ "loss": 0.7491,
317
+ "step": 44
318
+ },
319
+ {
320
+ "epoch": 0.1361573373676248,
321
+ "grad_norm": 2.580757184651181,
322
+ "learning_rate": 1.941544065183021e-05,
323
+ "loss": 0.698,
324
+ "step": 45
325
+ },
326
+ {
327
+ "epoch": 0.13918305597579425,
328
+ "grad_norm": 2.428509105247681,
329
+ "learning_rate": 1.9381913359224844e-05,
330
+ "loss": 0.678,
331
+ "step": 46
332
+ },
333
+ {
334
+ "epoch": 0.1422087745839637,
335
+ "grad_norm": 2.175415211354414,
336
+ "learning_rate": 1.9347481818282927e-05,
337
+ "loss": 0.6631,
338
+ "step": 47
339
+ },
340
+ {
341
+ "epoch": 0.14523449319213314,
342
+ "grad_norm": 2.3270447497446916,
343
+ "learning_rate": 1.9312149347588035e-05,
344
+ "loss": 0.7061,
345
+ "step": 48
346
+ },
347
+ {
348
+ "epoch": 0.14826021180030258,
349
+ "grad_norm": 2.4287476749314836,
350
+ "learning_rate": 1.9275919352557242e-05,
351
+ "loss": 0.7237,
352
+ "step": 49
353
+ },
354
+ {
355
+ "epoch": 0.15128593040847202,
356
+ "grad_norm": 2.2410089350217213,
357
+ "learning_rate": 1.9238795325112867e-05,
358
+ "loss": 0.7193,
359
+ "step": 50
360
+ },
361
+ {
362
+ "epoch": 0.15431164901664146,
363
+ "grad_norm": 2.3787409817474536,
364
+ "learning_rate": 1.920078084334595e-05,
365
+ "loss": 0.6697,
366
+ "step": 51
367
+ },
368
+ {
369
+ "epoch": 0.1573373676248109,
370
+ "grad_norm": 2.2713457923537934,
371
+ "learning_rate": 1.916187957117136e-05,
372
+ "loss": 0.7033,
373
+ "step": 52
374
+ },
375
+ {
376
+ "epoch": 0.16036308623298035,
377
+ "grad_norm": 2.581980330545954,
378
+ "learning_rate": 1.9122095257974676e-05,
379
+ "loss": 0.6939,
380
+ "step": 53
381
+ },
382
+ {
383
+ "epoch": 0.16338880484114976,
384
+ "grad_norm": 2.432198331476182,
385
+ "learning_rate": 1.9081431738250815e-05,
386
+ "loss": 0.6892,
387
+ "step": 54
388
+ },
389
+ {
390
+ "epoch": 0.1664145234493192,
391
+ "grad_norm": 2.099821368018975,
392
+ "learning_rate": 1.9039892931234434e-05,
393
+ "loss": 0.6577,
394
+ "step": 55
395
+ },
396
+ {
397
+ "epoch": 0.16944024205748864,
398
+ "grad_norm": 2.2823357611877424,
399
+ "learning_rate": 1.8997482840522218e-05,
400
+ "loss": 0.6716,
401
+ "step": 56
402
+ },
403
+ {
404
+ "epoch": 0.17246596066565809,
405
+ "grad_norm": 11.790926761539435,
406
+ "learning_rate": 1.895420555368697e-05,
407
+ "loss": 0.6863,
408
+ "step": 57
409
+ },
410
+ {
411
+ "epoch": 0.17549167927382753,
412
+ "grad_norm": 2.1763784091818312,
413
+ "learning_rate": 1.891006524188368e-05,
414
+ "loss": 0.6779,
415
+ "step": 58
416
+ },
417
+ {
418
+ "epoch": 0.17851739788199697,
419
+ "grad_norm": 2.1869852327736554,
420
+ "learning_rate": 1.8865066159447468e-05,
421
+ "loss": 0.7007,
422
+ "step": 59
423
+ },
424
+ {
425
+ "epoch": 0.1815431164901664,
426
+ "grad_norm": 3.7000458856461753,
427
+ "learning_rate": 1.881921264348355e-05,
428
+ "loss": 0.696,
429
+ "step": 60
430
+ },
431
+ {
432
+ "epoch": 0.18456883509833585,
433
+ "grad_norm": 2.1345158830729445,
434
+ "learning_rate": 1.8772509113449243e-05,
435
+ "loss": 0.6296,
436
+ "step": 61
437
+ },
438
+ {
439
+ "epoch": 0.1875945537065053,
440
+ "grad_norm": 5.324784255219841,
441
+ "learning_rate": 1.8724960070727974e-05,
442
+ "loss": 0.7144,
443
+ "step": 62
444
+ },
445
+ {
446
+ "epoch": 0.19062027231467474,
447
+ "grad_norm": 2.2854948381160733,
448
+ "learning_rate": 1.8676570098195443e-05,
449
+ "loss": 0.6876,
450
+ "step": 63
451
+ },
452
+ {
453
+ "epoch": 0.19364599092284418,
454
+ "grad_norm": 2.2451810023966274,
455
+ "learning_rate": 1.862734385977792e-05,
456
+ "loss": 0.6923,
457
+ "step": 64
458
+ },
459
+ {
460
+ "epoch": 0.19667170953101362,
461
+ "grad_norm": 2.4986096326582397,
462
+ "learning_rate": 1.8577286100002723e-05,
463
+ "loss": 0.6974,
464
+ "step": 65
465
+ },
466
+ {
467
+ "epoch": 0.19969742813918306,
468
+ "grad_norm": 2.5519641214317117,
469
+ "learning_rate": 1.8526401643540924e-05,
470
+ "loss": 0.7081,
471
+ "step": 66
472
+ },
473
+ {
474
+ "epoch": 0.2027231467473525,
475
+ "grad_norm": 2.269549566002934,
476
+ "learning_rate": 1.8474695394742345e-05,
477
+ "loss": 0.7218,
478
+ "step": 67
479
+ },
480
+ {
481
+ "epoch": 0.20574886535552195,
482
+ "grad_norm": 1.9838611913406103,
483
+ "learning_rate": 1.8422172337162865e-05,
484
+ "loss": 0.7032,
485
+ "step": 68
486
+ },
487
+ {
488
+ "epoch": 0.2087745839636914,
489
+ "grad_norm": 2.0276028298466064,
490
+ "learning_rate": 1.8368837533084092e-05,
491
+ "loss": 0.6917,
492
+ "step": 69
493
+ },
494
+ {
495
+ "epoch": 0.2118003025718608,
496
+ "grad_norm": 1.8464725712525754,
497
+ "learning_rate": 1.8314696123025456e-05,
498
+ "loss": 0.6734,
499
+ "step": 70
500
+ },
501
+ {
502
+ "epoch": 0.21482602118003025,
503
+ "grad_norm": 2.1527856581976623,
504
+ "learning_rate": 1.825975332524873e-05,
505
+ "loss": 0.698,
506
+ "step": 71
507
+ },
508
+ {
509
+ "epoch": 0.2178517397881997,
510
+ "grad_norm": 2.016545413670171,
511
+ "learning_rate": 1.8204014435255136e-05,
512
+ "loss": 0.6818,
513
+ "step": 72
514
+ },
515
+ {
516
+ "epoch": 0.22087745839636913,
517
+ "grad_norm": 2.3166505526604015,
518
+ "learning_rate": 1.8147484825274895e-05,
519
+ "loss": 0.6432,
520
+ "step": 73
521
+ },
522
+ {
523
+ "epoch": 0.22390317700453857,
524
+ "grad_norm": 2.020726634051282,
525
+ "learning_rate": 1.8090169943749477e-05,
526
+ "loss": 0.6813,
527
+ "step": 74
528
+ },
529
+ {
530
+ "epoch": 0.22692889561270801,
531
+ "grad_norm": 2.271648285326035,
532
+ "learning_rate": 1.803207531480645e-05,
533
+ "loss": 0.7153,
534
+ "step": 75
535
+ },
536
+ {
537
+ "epoch": 0.22995461422087746,
538
+ "grad_norm": 2.0267783730128848,
539
+ "learning_rate": 1.797320653772707e-05,
540
+ "loss": 0.668,
541
+ "step": 76
542
+ },
543
+ {
544
+ "epoch": 0.2329803328290469,
545
+ "grad_norm": 2.060993406421187,
546
+ "learning_rate": 1.7913569286406606e-05,
547
+ "loss": 0.7068,
548
+ "step": 77
549
+ },
550
+ {
551
+ "epoch": 0.23600605143721634,
552
+ "grad_norm": 1.8795048223398712,
553
+ "learning_rate": 1.785316930880745e-05,
554
+ "loss": 0.718,
555
+ "step": 78
556
+ },
557
+ {
558
+ "epoch": 0.23903177004538578,
559
+ "grad_norm": 2.0498135335289707,
560
+ "learning_rate": 1.779201242640517e-05,
561
+ "loss": 0.6744,
562
+ "step": 79
563
+ },
564
+ {
565
+ "epoch": 0.24205748865355523,
566
+ "grad_norm": 1.9785152837196573,
567
+ "learning_rate": 1.773010453362737e-05,
568
+ "loss": 0.6739,
569
+ "step": 80
570
+ },
571
+ {
572
+ "epoch": 0.24508320726172467,
573
+ "grad_norm": 2.0275695322358867,
574
+ "learning_rate": 1.7667451597285617e-05,
575
+ "loss": 0.6866,
576
+ "step": 81
577
+ },
578
+ {
579
+ "epoch": 0.2481089258698941,
580
+ "grad_norm": 2.0024823958969464,
581
+ "learning_rate": 1.7604059656000313e-05,
582
+ "loss": 0.6949,
583
+ "step": 82
584
+ },
585
+ {
586
+ "epoch": 0.25113464447806355,
587
+ "grad_norm": 2.0089305685455527,
588
+ "learning_rate": 1.7539934819618696e-05,
589
+ "loss": 0.6622,
590
+ "step": 83
591
+ },
592
+ {
593
+ "epoch": 0.25113464447806355,
594
+ "eval_loss": 0.6706682443618774,
595
+ "eval_runtime": 103.3626,
596
+ "eval_samples_per_second": 40.924,
597
+ "eval_steps_per_second": 0.648,
598
+ "step": 83
599
+ },
600
+ {
601
+ "epoch": 0.25416036308623297,
602
+ "grad_norm": 2.00524741649147,
603
+ "learning_rate": 1.747508326862597e-05,
604
+ "loss": 0.6773,
605
+ "step": 84
606
+ },
607
+ {
608
+ "epoch": 0.25718608169440244,
609
+ "grad_norm": 1.8886708832788808,
610
+ "learning_rate": 1.7409511253549592e-05,
611
+ "loss": 0.6931,
612
+ "step": 85
613
+ },
614
+ {
615
+ "epoch": 0.26021180030257185,
616
+ "grad_norm": 2.0256246365229686,
617
+ "learning_rate": 1.7343225094356857e-05,
618
+ "loss": 0.6902,
619
+ "step": 86
620
+ },
621
+ {
622
+ "epoch": 0.2632375189107413,
623
+ "grad_norm": 1.8431512625044615,
624
+ "learning_rate": 1.727623117984575e-05,
625
+ "loss": 0.6556,
626
+ "step": 87
627
+ },
628
+ {
629
+ "epoch": 0.26626323751891073,
630
+ "grad_norm": 1.8944691010632577,
631
+ "learning_rate": 1.720853596702919e-05,
632
+ "loss": 0.6856,
633
+ "step": 88
634
+ },
635
+ {
636
+ "epoch": 0.2692889561270802,
637
+ "grad_norm": 1.9923243591653923,
638
+ "learning_rate": 1.7140145980512684e-05,
639
+ "loss": 0.6628,
640
+ "step": 89
641
+ },
642
+ {
643
+ "epoch": 0.2723146747352496,
644
+ "grad_norm": 2.2829966970709688,
645
+ "learning_rate": 1.7071067811865477e-05,
646
+ "loss": 0.7071,
647
+ "step": 90
648
+ },
649
+ {
650
+ "epoch": 0.2753403933434191,
651
+ "grad_norm": 1.9768171039604017,
652
+ "learning_rate": 1.7001308118985237e-05,
653
+ "loss": 0.6874,
654
+ "step": 91
655
+ },
656
+ {
657
+ "epoch": 0.2783661119515885,
658
+ "grad_norm": 1.8663920170577142,
659
+ "learning_rate": 1.6930873625456362e-05,
660
+ "loss": 0.6844,
661
+ "step": 92
662
+ },
663
+ {
664
+ "epoch": 0.2813918305597579,
665
+ "grad_norm": 1.9299544891190301,
666
+ "learning_rate": 1.685977111990193e-05,
667
+ "loss": 0.6853,
668
+ "step": 93
669
+ },
670
+ {
671
+ "epoch": 0.2844175491679274,
672
+ "grad_norm": 2.2225214208045183,
673
+ "learning_rate": 1.678800745532942e-05,
674
+ "loss": 0.6729,
675
+ "step": 94
676
+ },
677
+ {
678
+ "epoch": 0.2874432677760968,
679
+ "grad_norm": 2.0661555212699905,
680
+ "learning_rate": 1.6715589548470187e-05,
681
+ "loss": 0.6844,
682
+ "step": 95
683
+ },
684
+ {
685
+ "epoch": 0.29046898638426627,
686
+ "grad_norm": 2.007818883998332,
687
+ "learning_rate": 1.664252437911282e-05,
688
+ "loss": 0.7108,
689
+ "step": 96
690
+ },
691
+ {
692
+ "epoch": 0.2934947049924357,
693
+ "grad_norm": 2.0125558122393,
694
+ "learning_rate": 1.6568818989430416e-05,
695
+ "loss": 0.6667,
696
+ "step": 97
697
+ },
698
+ {
699
+ "epoch": 0.29652042360060515,
700
+ "grad_norm": 2.1343504218654012,
701
+ "learning_rate": 1.6494480483301836e-05,
702
+ "loss": 0.6732,
703
+ "step": 98
704
+ },
705
+ {
706
+ "epoch": 0.29954614220877457,
707
+ "grad_norm": 1.9813823609834715,
708
+ "learning_rate": 1.641951602562703e-05,
709
+ "loss": 0.6593,
710
+ "step": 99
711
+ },
712
+ {
713
+ "epoch": 0.30257186081694404,
714
+ "grad_norm": 1.975056311713501,
715
+ "learning_rate": 1.6343932841636455e-05,
716
+ "loss": 0.683,
717
+ "step": 100
718
+ },
719
+ {
720
+ "epoch": 0.30559757942511345,
721
+ "grad_norm": 1.854491404440836,
722
+ "learning_rate": 1.6267738216194698e-05,
723
+ "loss": 0.6826,
724
+ "step": 101
725
+ },
726
+ {
727
+ "epoch": 0.3086232980332829,
728
+ "grad_norm": 1.891220184424636,
729
+ "learning_rate": 1.6190939493098344e-05,
730
+ "loss": 0.665,
731
+ "step": 102
732
+ },
733
+ {
734
+ "epoch": 0.31164901664145234,
735
+ "grad_norm": 2.735004116658562,
736
+ "learning_rate": 1.6113544074368166e-05,
737
+ "loss": 0.6818,
738
+ "step": 103
739
+ },
740
+ {
741
+ "epoch": 0.3146747352496218,
742
+ "grad_norm": 1.844699616410939,
743
+ "learning_rate": 1.6035559419535714e-05,
744
+ "loss": 0.6771,
745
+ "step": 104
746
+ },
747
+ {
748
+ "epoch": 0.3177004538577912,
749
+ "grad_norm": 2.0074416503342536,
750
+ "learning_rate": 1.5956993044924334e-05,
751
+ "loss": 0.664,
752
+ "step": 105
753
+ },
754
+ {
755
+ "epoch": 0.3207261724659607,
756
+ "grad_norm": 1.9077963205419504,
757
+ "learning_rate": 1.5877852522924733e-05,
758
+ "loss": 0.6387,
759
+ "step": 106
760
+ },
761
+ {
762
+ "epoch": 0.3237518910741301,
763
+ "grad_norm": 2.5814428860557315,
764
+ "learning_rate": 1.579814548126514e-05,
765
+ "loss": 0.6463,
766
+ "step": 107
767
+ },
768
+ {
769
+ "epoch": 0.3267776096822995,
770
+ "grad_norm": 2.02923331561202,
771
+ "learning_rate": 1.5717879602276123e-05,
772
+ "loss": 0.6408,
773
+ "step": 108
774
+ },
775
+ {
776
+ "epoch": 0.329803328290469,
777
+ "grad_norm": 1.8232061934697785,
778
+ "learning_rate": 1.5637062622150168e-05,
779
+ "loss": 0.6328,
780
+ "step": 109
781
+ },
782
+ {
783
+ "epoch": 0.3328290468986384,
784
+ "grad_norm": 1.944530680893889,
785
+ "learning_rate": 1.5555702330196024e-05,
786
+ "loss": 0.6692,
787
+ "step": 110
788
+ },
789
+ {
790
+ "epoch": 0.3358547655068079,
791
+ "grad_norm": 2.2591890489972735,
792
+ "learning_rate": 1.547380656808797e-05,
793
+ "loss": 0.6503,
794
+ "step": 111
795
+ },
796
+ {
797
+ "epoch": 0.3388804841149773,
798
+ "grad_norm": 1.9297499891414,
799
+ "learning_rate": 1.5391383229110005e-05,
800
+ "loss": 0.6871,
801
+ "step": 112
802
+ },
803
+ {
804
+ "epoch": 0.34190620272314676,
805
+ "grad_norm": 1.9218912909376045,
806
+ "learning_rate": 1.5308440257395095e-05,
807
+ "loss": 0.679,
808
+ "step": 113
809
+ },
810
+ {
811
+ "epoch": 0.34493192133131617,
812
+ "grad_norm": 1.9228101408821987,
813
+ "learning_rate": 1.5224985647159489e-05,
814
+ "loss": 0.6788,
815
+ "step": 114
816
+ },
817
+ {
818
+ "epoch": 0.34795763993948564,
819
+ "grad_norm": 1.9477527079521044,
820
+ "learning_rate": 1.5141027441932217e-05,
821
+ "loss": 0.6478,
822
+ "step": 115
823
+ },
824
+ {
825
+ "epoch": 0.35098335854765506,
826
+ "grad_norm": 1.8524671597698312,
827
+ "learning_rate": 1.5056573733779848e-05,
828
+ "loss": 0.671,
829
+ "step": 116
830
+ },
831
+ {
832
+ "epoch": 0.3540090771558245,
833
+ "grad_norm": 1.8754782332500444,
834
+ "learning_rate": 1.4971632662526545e-05,
835
+ "loss": 0.6422,
836
+ "step": 117
837
+ },
838
+ {
839
+ "epoch": 0.35703479576399394,
840
+ "grad_norm": 1.9451177781938065,
841
+ "learning_rate": 1.4886212414969551e-05,
842
+ "loss": 0.6618,
843
+ "step": 118
844
+ },
845
+ {
846
+ "epoch": 0.3600605143721634,
847
+ "grad_norm": 1.9806857849814412,
848
+ "learning_rate": 1.4800321224090114e-05,
849
+ "loss": 0.6624,
850
+ "step": 119
851
+ },
852
+ {
853
+ "epoch": 0.3630862329803328,
854
+ "grad_norm": 1.9170879932814107,
855
+ "learning_rate": 1.4713967368259981e-05,
856
+ "loss": 0.6688,
857
+ "step": 120
858
+ },
859
+ {
860
+ "epoch": 0.3661119515885023,
861
+ "grad_norm": 8.790756396410332,
862
+ "learning_rate": 1.4627159170443504e-05,
863
+ "loss": 0.6665,
864
+ "step": 121
865
+ },
866
+ {
867
+ "epoch": 0.3691376701966717,
868
+ "grad_norm": 2.0362111479618834,
869
+ "learning_rate": 1.4539904997395468e-05,
870
+ "loss": 0.6584,
871
+ "step": 122
872
+ },
873
+ {
874
+ "epoch": 0.3721633888048411,
875
+ "grad_norm": 1.8915219393485387,
876
+ "learning_rate": 1.4452213258854684e-05,
877
+ "loss": 0.6566,
878
+ "step": 123
879
+ },
880
+ {
881
+ "epoch": 0.3751891074130106,
882
+ "grad_norm": 1.8144407540201268,
883
+ "learning_rate": 1.436409240673342e-05,
884
+ "loss": 0.6554,
885
+ "step": 124
886
+ },
887
+ {
888
+ "epoch": 0.37821482602118,
889
+ "grad_norm": 1.967650978948985,
890
+ "learning_rate": 1.4275550934302822e-05,
891
+ "loss": 0.6933,
892
+ "step": 125
893
+ },
894
+ {
895
+ "epoch": 0.3812405446293495,
896
+ "grad_norm": 1.9983937915711714,
897
+ "learning_rate": 1.4186597375374283e-05,
898
+ "loss": 0.6587,
899
+ "step": 126
900
+ },
901
+ {
902
+ "epoch": 0.3842662632375189,
903
+ "grad_norm": 1.8623756601102601,
904
+ "learning_rate": 1.4097240303476955e-05,
905
+ "loss": 0.663,
906
+ "step": 127
907
+ },
908
+ {
909
+ "epoch": 0.38729198184568836,
910
+ "grad_norm": 1.8157773118402531,
911
+ "learning_rate": 1.4007488331031409e-05,
912
+ "loss": 0.6355,
913
+ "step": 128
914
+ },
915
+ {
916
+ "epoch": 0.3903177004538578,
917
+ "grad_norm": 2.0094412370616337,
918
+ "learning_rate": 1.391735010851956e-05,
919
+ "loss": 0.6984,
920
+ "step": 129
921
+ },
922
+ {
923
+ "epoch": 0.39334341906202724,
924
+ "grad_norm": 1.8684281968245415,
925
+ "learning_rate": 1.3826834323650899e-05,
926
+ "loss": 0.6868,
927
+ "step": 130
928
+ },
929
+ {
930
+ "epoch": 0.39636913767019666,
931
+ "grad_norm": 1.7815078230285688,
932
+ "learning_rate": 1.3735949700525164e-05,
933
+ "loss": 0.6395,
934
+ "step": 131
935
+ },
936
+ {
937
+ "epoch": 0.39939485627836613,
938
+ "grad_norm": 1.8484611828643538,
939
+ "learning_rate": 1.3644704998791501e-05,
940
+ "loss": 0.6638,
941
+ "step": 132
942
+ },
943
+ {
944
+ "epoch": 0.40242057488653554,
945
+ "grad_norm": 1.8795543889555775,
946
+ "learning_rate": 1.3553109012804162e-05,
947
+ "loss": 0.6584,
948
+ "step": 133
949
+ },
950
+ {
951
+ "epoch": 0.405446293494705,
952
+ "grad_norm": 2.0272538642472466,
953
+ "learning_rate": 1.346117057077493e-05,
954
+ "loss": 0.6541,
955
+ "step": 134
956
+ },
957
+ {
958
+ "epoch": 0.4084720121028744,
959
+ "grad_norm": 1.9101405219462362,
960
+ "learning_rate": 1.3368898533922202e-05,
961
+ "loss": 0.6565,
962
+ "step": 135
963
+ },
964
+ {
965
+ "epoch": 0.4114977307110439,
966
+ "grad_norm": 2.0088784211383848,
967
+ "learning_rate": 1.3276301795616937e-05,
968
+ "loss": 0.6563,
969
+ "step": 136
970
+ },
971
+ {
972
+ "epoch": 0.4145234493192133,
973
+ "grad_norm": 1.8914928243192555,
974
+ "learning_rate": 1.3183389280525497e-05,
975
+ "loss": 0.6901,
976
+ "step": 137
977
+ },
978
+ {
979
+ "epoch": 0.4175491679273828,
980
+ "grad_norm": 1.7906713381258323,
981
+ "learning_rate": 1.3090169943749475e-05,
982
+ "loss": 0.6428,
983
+ "step": 138
984
+ },
985
+ {
986
+ "epoch": 0.4205748865355522,
987
+ "grad_norm": 1.9342457965175344,
988
+ "learning_rate": 1.2996652769962567e-05,
989
+ "loss": 0.6846,
990
+ "step": 139
991
+ },
992
+ {
993
+ "epoch": 0.4236006051437216,
994
+ "grad_norm": 1.9699269585190082,
995
+ "learning_rate": 1.2902846772544625e-05,
996
+ "loss": 0.65,
997
+ "step": 140
998
+ },
999
+ {
1000
+ "epoch": 0.4266263237518911,
1001
+ "grad_norm": 1.9809903638292505,
1002
+ "learning_rate": 1.2808760992712923e-05,
1003
+ "loss": 0.653,
1004
+ "step": 141
1005
+ },
1006
+ {
1007
+ "epoch": 0.4296520423600605,
1008
+ "grad_norm": 2.032945686435021,
1009
+ "learning_rate": 1.2714404498650743e-05,
1010
+ "loss": 0.6289,
1011
+ "step": 142
1012
+ },
1013
+ {
1014
+ "epoch": 0.43267776096822996,
1015
+ "grad_norm": 2.0794369135009787,
1016
+ "learning_rate": 1.2619786384633374e-05,
1017
+ "loss": 0.6617,
1018
+ "step": 143
1019
+ },
1020
+ {
1021
+ "epoch": 0.4357034795763994,
1022
+ "grad_norm": 1.866924916352073,
1023
+ "learning_rate": 1.252491577015158e-05,
1024
+ "loss": 0.6546,
1025
+ "step": 144
1026
+ },
1027
+ {
1028
+ "epoch": 0.43872919818456885,
1029
+ "grad_norm": 1.8634818795040904,
1030
+ "learning_rate": 1.242980179903264e-05,
1031
+ "loss": 0.6547,
1032
+ "step": 145
1033
+ },
1034
+ {
1035
+ "epoch": 0.44175491679273826,
1036
+ "grad_norm": 2.027288842597497,
1037
+ "learning_rate": 1.2334453638559057e-05,
1038
+ "loss": 0.6716,
1039
+ "step": 146
1040
+ },
1041
+ {
1042
+ "epoch": 0.44478063540090773,
1043
+ "grad_norm": 1.8143983839498472,
1044
+ "learning_rate": 1.2238880478584987e-05,
1045
+ "loss": 0.63,
1046
+ "step": 147
1047
+ },
1048
+ {
1049
+ "epoch": 0.44780635400907715,
1050
+ "grad_norm": 1.9479084908694164,
1051
+ "learning_rate": 1.2143091530650508e-05,
1052
+ "loss": 0.666,
1053
+ "step": 148
1054
+ },
1055
+ {
1056
+ "epoch": 0.4508320726172466,
1057
+ "grad_norm": 5.864872587821412,
1058
+ "learning_rate": 1.2047096027093798e-05,
1059
+ "loss": 0.6403,
1060
+ "step": 149
1061
+ },
1062
+ {
1063
+ "epoch": 0.45385779122541603,
1064
+ "grad_norm": 1.8796985531477728,
1065
+ "learning_rate": 1.1950903220161286e-05,
1066
+ "loss": 0.647,
1067
+ "step": 150
1068
+ },
1069
+ {
1070
+ "epoch": 0.4568835098335855,
1071
+ "grad_norm": 1.8656381112298428,
1072
+ "learning_rate": 1.185452238111591e-05,
1073
+ "loss": 0.6434,
1074
+ "step": 151
1075
+ },
1076
+ {
1077
+ "epoch": 0.4599092284417549,
1078
+ "grad_norm": 1.8201079356405698,
1079
+ "learning_rate": 1.1757962799343548e-05,
1080
+ "loss": 0.6222,
1081
+ "step": 152
1082
+ },
1083
+ {
1084
+ "epoch": 0.4629349470499244,
1085
+ "grad_norm": 1.8114819922272898,
1086
+ "learning_rate": 1.1661233781457655e-05,
1087
+ "loss": 0.6276,
1088
+ "step": 153
1089
+ },
1090
+ {
1091
+ "epoch": 0.4659606656580938,
1092
+ "grad_norm": 1.7724243161132796,
1093
+ "learning_rate": 1.156434465040231e-05,
1094
+ "loss": 0.6102,
1095
+ "step": 154
1096
+ },
1097
+ {
1098
+ "epoch": 0.4689863842662632,
1099
+ "grad_norm": 1.8396658090661742,
1100
+ "learning_rate": 1.1467304744553618e-05,
1101
+ "loss": 0.633,
1102
+ "step": 155
1103
+ },
1104
+ {
1105
+ "epoch": 0.4720121028744327,
1106
+ "grad_norm": 1.9044000192773716,
1107
+ "learning_rate": 1.1370123416819683e-05,
1108
+ "loss": 0.6881,
1109
+ "step": 156
1110
+ },
1111
+ {
1112
+ "epoch": 0.4750378214826021,
1113
+ "grad_norm": 1.9801492797746596,
1114
+ "learning_rate": 1.1272810033739134e-05,
1115
+ "loss": 0.6737,
1116
+ "step": 157
1117
+ },
1118
+ {
1119
+ "epoch": 0.47806354009077157,
1120
+ "grad_norm": 1.8321494861233507,
1121
+ "learning_rate": 1.1175373974578378e-05,
1122
+ "loss": 0.634,
1123
+ "step": 158
1124
+ },
1125
+ {
1126
+ "epoch": 0.481089258698941,
1127
+ "grad_norm": 1.7385897891185031,
1128
+ "learning_rate": 1.1077824630427593e-05,
1129
+ "loss": 0.6601,
1130
+ "step": 159
1131
+ },
1132
+ {
1133
+ "epoch": 0.48411497730711045,
1134
+ "grad_norm": 1.8872311240237092,
1135
+ "learning_rate": 1.098017140329561e-05,
1136
+ "loss": 0.62,
1137
+ "step": 160
1138
+ },
1139
+ {
1140
+ "epoch": 0.48714069591527986,
1141
+ "grad_norm": 1.9327771087303642,
1142
+ "learning_rate": 1.0882423705203698e-05,
1143
+ "loss": 0.6417,
1144
+ "step": 161
1145
+ },
1146
+ {
1147
+ "epoch": 0.49016641452344933,
1148
+ "grad_norm": 1.854145978232897,
1149
+ "learning_rate": 1.0784590957278452e-05,
1150
+ "loss": 0.6638,
1151
+ "step": 162
1152
+ },
1153
+ {
1154
+ "epoch": 0.49319213313161875,
1155
+ "grad_norm": 1.7958642023452327,
1156
+ "learning_rate": 1.0686682588843737e-05,
1157
+ "loss": 0.6329,
1158
+ "step": 163
1159
+ },
1160
+ {
1161
+ "epoch": 0.4962178517397882,
1162
+ "grad_norm": 1.8646316242847085,
1163
+ "learning_rate": 1.058870803651189e-05,
1164
+ "loss": 0.628,
1165
+ "step": 164
1166
+ },
1167
+ {
1168
+ "epoch": 0.49924357034795763,
1169
+ "grad_norm": 2.034057025942459,
1170
+ "learning_rate": 1.0490676743274181e-05,
1171
+ "loss": 0.6602,
1172
+ "step": 165
1173
+ },
1174
+ {
1175
+ "epoch": 0.5022692889561271,
1176
+ "grad_norm": 2.075987391319436,
1177
+ "learning_rate": 1.0392598157590687e-05,
1178
+ "loss": 0.6705,
1179
+ "step": 166
1180
+ },
1181
+ {
1182
+ "epoch": 0.5022692889561271,
1183
+ "eval_loss": 0.6367672681808472,
1184
+ "eval_runtime": 98.7198,
1185
+ "eval_samples_per_second": 42.849,
1186
+ "eval_steps_per_second": 0.679,
1187
+ "step": 166
1188
+ },
1189
+ {
1190
+ "epoch": 0.5052950075642966,
1191
+ "grad_norm": 1.8209280670369759,
1192
+ "learning_rate": 1.0294481732479635e-05,
1193
+ "loss": 0.6437,
1194
+ "step": 167
1195
+ },
1196
+ {
1197
+ "epoch": 0.5083207261724659,
1198
+ "grad_norm": 2.01862600963615,
1199
+ "learning_rate": 1.0196336924606282e-05,
1200
+ "loss": 0.6908,
1201
+ "step": 168
1202
+ },
1203
+ {
1204
+ "epoch": 0.5113464447806354,
1205
+ "grad_norm": 1.8600737637708504,
1206
+ "learning_rate": 1.0098173193371498e-05,
1207
+ "loss": 0.6394,
1208
+ "step": 169
1209
+ },
1210
+ {
1211
+ "epoch": 0.5143721633888049,
1212
+ "grad_norm": 1.752154066184476,
1213
+ "learning_rate": 1e-05,
1214
+ "loss": 0.641,
1215
+ "step": 170
1216
+ },
1217
+ {
1218
+ "epoch": 0.5173978819969742,
1219
+ "grad_norm": 1.7382915526611122,
1220
+ "learning_rate": 9.901826806628505e-06,
1221
+ "loss": 0.6231,
1222
+ "step": 171
1223
+ },
1224
+ {
1225
+ "epoch": 0.5204236006051437,
1226
+ "grad_norm": 1.831957458509604,
1227
+ "learning_rate": 9.80366307539372e-06,
1228
+ "loss": 0.6153,
1229
+ "step": 172
1230
+ },
1231
+ {
1232
+ "epoch": 0.5234493192133132,
1233
+ "grad_norm": 1.9814775835155989,
1234
+ "learning_rate": 9.705518267520369e-06,
1235
+ "loss": 0.6342,
1236
+ "step": 173
1237
+ },
1238
+ {
1239
+ "epoch": 0.5264750378214826,
1240
+ "grad_norm": 1.7928530986876814,
1241
+ "learning_rate": 9.607401842409318e-06,
1242
+ "loss": 0.642,
1243
+ "step": 174
1244
+ },
1245
+ {
1246
+ "epoch": 0.529500756429652,
1247
+ "grad_norm": 1.8372327601480345,
1248
+ "learning_rate": 9.50932325672582e-06,
1249
+ "loss": 0.6323,
1250
+ "step": 175
1251
+ },
1252
+ {
1253
+ "epoch": 0.5325264750378215,
1254
+ "grad_norm": 1.9570960995420552,
1255
+ "learning_rate": 9.41129196348811e-06,
1256
+ "loss": 0.6446,
1257
+ "step": 176
1258
+ },
1259
+ {
1260
+ "epoch": 0.5355521936459909,
1261
+ "grad_norm": 1.9572579231094382,
1262
+ "learning_rate": 9.313317411156265e-06,
1263
+ "loss": 0.6869,
1264
+ "step": 177
1265
+ },
1266
+ {
1267
+ "epoch": 0.5385779122541604,
1268
+ "grad_norm": 1.8108192928711597,
1269
+ "learning_rate": 9.215409042721553e-06,
1270
+ "loss": 0.6278,
1271
+ "step": 178
1272
+ },
1273
+ {
1274
+ "epoch": 0.5416036308623298,
1275
+ "grad_norm": 1.821987784514361,
1276
+ "learning_rate": 9.117576294796307e-06,
1277
+ "loss": 0.6115,
1278
+ "step": 179
1279
+ },
1280
+ {
1281
+ "epoch": 0.5446293494704992,
1282
+ "grad_norm": 1.987620896891723,
1283
+ "learning_rate": 9.019828596704394e-06,
1284
+ "loss": 0.6209,
1285
+ "step": 180
1286
+ },
1287
+ {
1288
+ "epoch": 0.5476550680786687,
1289
+ "grad_norm": 2.028602066146976,
1290
+ "learning_rate": 8.922175369572407e-06,
1291
+ "loss": 0.6654,
1292
+ "step": 181
1293
+ },
1294
+ {
1295
+ "epoch": 0.5506807866868382,
1296
+ "grad_norm": 1.8580079260447575,
1297
+ "learning_rate": 8.824626025421625e-06,
1298
+ "loss": 0.6006,
1299
+ "step": 182
1300
+ },
1301
+ {
1302
+ "epoch": 0.5537065052950075,
1303
+ "grad_norm": 1.8986231751877851,
1304
+ "learning_rate": 8.72718996626087e-06,
1305
+ "loss": 0.6214,
1306
+ "step": 183
1307
+ },
1308
+ {
1309
+ "epoch": 0.556732223903177,
1310
+ "grad_norm": 1.8492480820880255,
1311
+ "learning_rate": 8.629876583180322e-06,
1312
+ "loss": 0.6186,
1313
+ "step": 184
1314
+ },
1315
+ {
1316
+ "epoch": 0.5597579425113465,
1317
+ "grad_norm": 1.922805209593131,
1318
+ "learning_rate": 8.532695255446384e-06,
1319
+ "loss": 0.6115,
1320
+ "step": 185
1321
+ },
1322
+ {
1323
+ "epoch": 0.5627836611195158,
1324
+ "grad_norm": 2.6417020206583954,
1325
+ "learning_rate": 8.43565534959769e-06,
1326
+ "loss": 0.6238,
1327
+ "step": 186
1328
+ },
1329
+ {
1330
+ "epoch": 0.5658093797276853,
1331
+ "grad_norm": 1.6531736881779153,
1332
+ "learning_rate": 8.338766218542348e-06,
1333
+ "loss": 0.5963,
1334
+ "step": 187
1335
+ },
1336
+ {
1337
+ "epoch": 0.5688350983358548,
1338
+ "grad_norm": 1.9058816333699906,
1339
+ "learning_rate": 8.242037200656455e-06,
1340
+ "loss": 0.657,
1341
+ "step": 188
1342
+ },
1343
+ {
1344
+ "epoch": 0.5718608169440242,
1345
+ "grad_norm": 1.844821663246108,
1346
+ "learning_rate": 8.145477618884092e-06,
1347
+ "loss": 0.6198,
1348
+ "step": 189
1349
+ },
1350
+ {
1351
+ "epoch": 0.5748865355521936,
1352
+ "grad_norm": 1.7545017629558766,
1353
+ "learning_rate": 8.04909677983872e-06,
1354
+ "loss": 0.6333,
1355
+ "step": 190
1356
+ },
1357
+ {
1358
+ "epoch": 0.5779122541603631,
1359
+ "grad_norm": 10.188950064582118,
1360
+ "learning_rate": 7.952903972906205e-06,
1361
+ "loss": 0.6019,
1362
+ "step": 191
1363
+ },
1364
+ {
1365
+ "epoch": 0.5809379727685325,
1366
+ "grad_norm": 1.926798592832203,
1367
+ "learning_rate": 7.856908469349495e-06,
1368
+ "loss": 0.6352,
1369
+ "step": 192
1370
+ },
1371
+ {
1372
+ "epoch": 0.583963691376702,
1373
+ "grad_norm": 1.8924205088850037,
1374
+ "learning_rate": 7.761119521415017e-06,
1375
+ "loss": 0.6337,
1376
+ "step": 193
1377
+ },
1378
+ {
1379
+ "epoch": 0.5869894099848714,
1380
+ "grad_norm": 1.8120023994489047,
1381
+ "learning_rate": 7.66554636144095e-06,
1382
+ "loss": 0.6432,
1383
+ "step": 194
1384
+ },
1385
+ {
1386
+ "epoch": 0.5900151285930408,
1387
+ "grad_norm": 1.963933061903905,
1388
+ "learning_rate": 7.570198200967363e-06,
1389
+ "loss": 0.6471,
1390
+ "step": 195
1391
+ },
1392
+ {
1393
+ "epoch": 0.5930408472012103,
1394
+ "grad_norm": 1.936190947316387,
1395
+ "learning_rate": 7.4750842298484205e-06,
1396
+ "loss": 0.6318,
1397
+ "step": 196
1398
+ },
1399
+ {
1400
+ "epoch": 0.5960665658093798,
1401
+ "grad_norm": 1.7686124345129755,
1402
+ "learning_rate": 7.380213615366627e-06,
1403
+ "loss": 0.6511,
1404
+ "step": 197
1405
+ },
1406
+ {
1407
+ "epoch": 0.5990922844175491,
1408
+ "grad_norm": 1.714431451377099,
1409
+ "learning_rate": 7.285595501349259e-06,
1410
+ "loss": 0.619,
1411
+ "step": 198
1412
+ },
1413
+ {
1414
+ "epoch": 0.6021180030257186,
1415
+ "grad_norm": 1.794288134133105,
1416
+ "learning_rate": 7.191239007287082e-06,
1417
+ "loss": 0.6368,
1418
+ "step": 199
1419
+ },
1420
+ {
1421
+ "epoch": 0.6051437216338881,
1422
+ "grad_norm": 1.9009974921888555,
1423
+ "learning_rate": 7.097153227455379e-06,
1424
+ "loss": 0.6724,
1425
+ "step": 200
1426
+ },
1427
+ {
1428
+ "epoch": 0.6081694402420574,
1429
+ "grad_norm": 1.8815723068802426,
1430
+ "learning_rate": 7.003347230037434e-06,
1431
+ "loss": 0.6479,
1432
+ "step": 201
1433
+ },
1434
+ {
1435
+ "epoch": 0.6111951588502269,
1436
+ "grad_norm": 2.0481718228938295,
1437
+ "learning_rate": 6.909830056250527e-06,
1438
+ "loss": 0.624,
1439
+ "step": 202
1440
+ },
1441
+ {
1442
+ "epoch": 0.6142208774583964,
1443
+ "grad_norm": 1.8660751503866284,
1444
+ "learning_rate": 6.816610719474503e-06,
1445
+ "loss": 0.6066,
1446
+ "step": 203
1447
+ },
1448
+ {
1449
+ "epoch": 0.6172465960665658,
1450
+ "grad_norm": 117.96240960174059,
1451
+ "learning_rate": 6.723698204383067e-06,
1452
+ "loss": 0.6306,
1453
+ "step": 204
1454
+ },
1455
+ {
1456
+ "epoch": 0.6202723146747352,
1457
+ "grad_norm": 2.076181536031468,
1458
+ "learning_rate": 6.631101466077801e-06,
1459
+ "loss": 0.6078,
1460
+ "step": 205
1461
+ },
1462
+ {
1463
+ "epoch": 0.6232980332829047,
1464
+ "grad_norm": 1.7578318607797734,
1465
+ "learning_rate": 6.538829429225068e-06,
1466
+ "loss": 0.6177,
1467
+ "step": 206
1468
+ },
1469
+ {
1470
+ "epoch": 0.6263237518910741,
1471
+ "grad_norm": 1.9981314247674604,
1472
+ "learning_rate": 6.446890987195842e-06,
1473
+ "loss": 0.6271,
1474
+ "step": 207
1475
+ },
1476
+ {
1477
+ "epoch": 0.6293494704992436,
1478
+ "grad_norm": 1.8418024833537472,
1479
+ "learning_rate": 6.355295001208504e-06,
1480
+ "loss": 0.6564,
1481
+ "step": 208
1482
+ },
1483
+ {
1484
+ "epoch": 0.632375189107413,
1485
+ "grad_norm": 1.7839428525822674,
1486
+ "learning_rate": 6.2640502994748375e-06,
1487
+ "loss": 0.6028,
1488
+ "step": 209
1489
+ },
1490
+ {
1491
+ "epoch": 0.6354009077155824,
1492
+ "grad_norm": 1.9280913987327355,
1493
+ "learning_rate": 6.173165676349103e-06,
1494
+ "loss": 0.6325,
1495
+ "step": 210
1496
+ },
1497
+ {
1498
+ "epoch": 0.6384266263237519,
1499
+ "grad_norm": 1.7624114926890098,
1500
+ "learning_rate": 6.082649891480441e-06,
1501
+ "loss": 0.6039,
1502
+ "step": 211
1503
+ },
1504
+ {
1505
+ "epoch": 0.6414523449319214,
1506
+ "grad_norm": 1.7978369158197076,
1507
+ "learning_rate": 5.9925116689685925e-06,
1508
+ "loss": 0.6116,
1509
+ "step": 212
1510
+ },
1511
+ {
1512
+ "epoch": 0.6444780635400907,
1513
+ "grad_norm": 1.7943900816934446,
1514
+ "learning_rate": 5.902759696523046e-06,
1515
+ "loss": 0.6142,
1516
+ "step": 213
1517
+ },
1518
+ {
1519
+ "epoch": 0.6475037821482602,
1520
+ "grad_norm": 1.7168302969493772,
1521
+ "learning_rate": 5.813402624625722e-06,
1522
+ "loss": 0.6029,
1523
+ "step": 214
1524
+ },
1525
+ {
1526
+ "epoch": 0.6505295007564297,
1527
+ "grad_norm": 1.8628109691773338,
1528
+ "learning_rate": 5.724449065697182e-06,
1529
+ "loss": 0.6307,
1530
+ "step": 215
1531
+ },
1532
+ {
1533
+ "epoch": 0.653555219364599,
1534
+ "grad_norm": 1.6997400415930484,
1535
+ "learning_rate": 5.635907593266578e-06,
1536
+ "loss": 0.6015,
1537
+ "step": 216
1538
+ },
1539
+ {
1540
+ "epoch": 0.6565809379727685,
1541
+ "grad_norm": 1.7490867060597552,
1542
+ "learning_rate": 5.54778674114532e-06,
1543
+ "loss": 0.6284,
1544
+ "step": 217
1545
+ },
1546
+ {
1547
+ "epoch": 0.659606656580938,
1548
+ "grad_norm": 1.76429241360846,
1549
+ "learning_rate": 5.460095002604533e-06,
1550
+ "loss": 0.6263,
1551
+ "step": 218
1552
+ },
1553
+ {
1554
+ "epoch": 0.6626323751891074,
1555
+ "grad_norm": 2.0251369838234896,
1556
+ "learning_rate": 5.3728408295565e-06,
1557
+ "loss": 0.6419,
1558
+ "step": 219
1559
+ },
1560
+ {
1561
+ "epoch": 0.6656580937972768,
1562
+ "grad_norm": 1.7306081116333085,
1563
+ "learning_rate": 5.286032631740023e-06,
1564
+ "loss": 0.6137,
1565
+ "step": 220
1566
+ },
1567
+ {
1568
+ "epoch": 0.6686838124054463,
1569
+ "grad_norm": 1.6542505700640842,
1570
+ "learning_rate": 5.199678775909889e-06,
1571
+ "loss": 0.5967,
1572
+ "step": 221
1573
+ },
1574
+ {
1575
+ "epoch": 0.6717095310136157,
1576
+ "grad_norm": 1.7919744473172632,
1577
+ "learning_rate": 5.1137875850304545e-06,
1578
+ "loss": 0.6091,
1579
+ "step": 222
1580
+ },
1581
+ {
1582
+ "epoch": 0.6747352496217852,
1583
+ "grad_norm": 1.766603281831805,
1584
+ "learning_rate": 5.0283673374734546e-06,
1585
+ "loss": 0.6317,
1586
+ "step": 223
1587
+ },
1588
+ {
1589
+ "epoch": 0.6777609682299546,
1590
+ "grad_norm": 1.7421115340862197,
1591
+ "learning_rate": 4.943426266220156e-06,
1592
+ "loss": 0.6092,
1593
+ "step": 224
1594
+ },
1595
+ {
1596
+ "epoch": 0.680786686838124,
1597
+ "grad_norm": 1.7745552222394632,
1598
+ "learning_rate": 4.858972558067784e-06,
1599
+ "loss": 0.599,
1600
+ "step": 225
1601
+ },
1602
+ {
1603
+ "epoch": 0.6838124054462935,
1604
+ "grad_norm": 1.6972184081839272,
1605
+ "learning_rate": 4.775014352840512e-06,
1606
+ "loss": 0.6135,
1607
+ "step": 226
1608
+ },
1609
+ {
1610
+ "epoch": 0.686838124054463,
1611
+ "grad_norm": 1.698395088856387,
1612
+ "learning_rate": 4.691559742604906e-06,
1613
+ "loss": 0.6221,
1614
+ "step": 227
1615
+ },
1616
+ {
1617
+ "epoch": 0.6898638426626323,
1618
+ "grad_norm": 1.7994471135722945,
1619
+ "learning_rate": 4.608616770889998e-06,
1620
+ "loss": 0.6055,
1621
+ "step": 228
1622
+ },
1623
+ {
1624
+ "epoch": 0.6928895612708018,
1625
+ "grad_norm": 1.759126619915089,
1626
+ "learning_rate": 4.526193431912038e-06,
1627
+ "loss": 0.6309,
1628
+ "step": 229
1629
+ },
1630
+ {
1631
+ "epoch": 0.6959152798789713,
1632
+ "grad_norm": 2.100213961777996,
1633
+ "learning_rate": 4.444297669803981e-06,
1634
+ "loss": 0.6635,
1635
+ "step": 230
1636
+ },
1637
+ {
1638
+ "epoch": 0.6989409984871406,
1639
+ "grad_norm": 1.7258768310459378,
1640
+ "learning_rate": 4.362937377849832e-06,
1641
+ "loss": 0.6062,
1642
+ "step": 231
1643
+ },
1644
+ {
1645
+ "epoch": 0.7019667170953101,
1646
+ "grad_norm": 1.7559400870524307,
1647
+ "learning_rate": 4.282120397723879e-06,
1648
+ "loss": 0.6062,
1649
+ "step": 232
1650
+ },
1651
+ {
1652
+ "epoch": 0.7049924357034796,
1653
+ "grad_norm": 2.2309488171255847,
1654
+ "learning_rate": 4.2018545187348645e-06,
1655
+ "loss": 0.61,
1656
+ "step": 233
1657
+ },
1658
+ {
1659
+ "epoch": 0.708018154311649,
1660
+ "grad_norm": 1.77038154751904,
1661
+ "learning_rate": 4.12214747707527e-06,
1662
+ "loss": 0.6239,
1663
+ "step": 234
1664
+ },
1665
+ {
1666
+ "epoch": 0.7110438729198184,
1667
+ "grad_norm": 2.81931851928973,
1668
+ "learning_rate": 4.043006955075667e-06,
1669
+ "loss": 0.623,
1670
+ "step": 235
1671
+ },
1672
+ {
1673
+ "epoch": 0.7140695915279879,
1674
+ "grad_norm": 1.8647484853899023,
1675
+ "learning_rate": 3.964440580464286e-06,
1676
+ "loss": 0.6378,
1677
+ "step": 236
1678
+ },
1679
+ {
1680
+ "epoch": 0.7170953101361573,
1681
+ "grad_norm": 1.7704652348097096,
1682
+ "learning_rate": 3.8864559256318375e-06,
1683
+ "loss": 0.6091,
1684
+ "step": 237
1685
+ },
1686
+ {
1687
+ "epoch": 0.7201210287443268,
1688
+ "grad_norm": 1.8492700632395556,
1689
+ "learning_rate": 3.8090605069016596e-06,
1690
+ "loss": 0.6534,
1691
+ "step": 238
1692
+ },
1693
+ {
1694
+ "epoch": 0.7231467473524962,
1695
+ "grad_norm": 1.6859140363387968,
1696
+ "learning_rate": 3.7322617838053066e-06,
1697
+ "loss": 0.6218,
1698
+ "step": 239
1699
+ },
1700
+ {
1701
+ "epoch": 0.7261724659606656,
1702
+ "grad_norm": 1.78360745200291,
1703
+ "learning_rate": 3.6560671583635467e-06,
1704
+ "loss": 0.6533,
1705
+ "step": 240
1706
+ },
1707
+ {
1708
+ "epoch": 0.7291981845688351,
1709
+ "grad_norm": 1.8821672849391728,
1710
+ "learning_rate": 3.58048397437297e-06,
1711
+ "loss": 0.6577,
1712
+ "step": 241
1713
+ },
1714
+ {
1715
+ "epoch": 0.7322239031770046,
1716
+ "grad_norm": 1.7018559651879825,
1717
+ "learning_rate": 3.505519516698165e-06,
1718
+ "loss": 0.6077,
1719
+ "step": 242
1720
+ },
1721
+ {
1722
+ "epoch": 0.735249621785174,
1723
+ "grad_norm": 1.6816021527136724,
1724
+ "learning_rate": 3.4311810105695875e-06,
1725
+ "loss": 0.5895,
1726
+ "step": 243
1727
+ },
1728
+ {
1729
+ "epoch": 0.7382753403933434,
1730
+ "grad_norm": 1.7460874029262543,
1731
+ "learning_rate": 3.3574756208871862e-06,
1732
+ "loss": 0.6224,
1733
+ "step": 244
1734
+ },
1735
+ {
1736
+ "epoch": 0.7413010590015129,
1737
+ "grad_norm": 1.7057523984488685,
1738
+ "learning_rate": 3.284410451529816e-06,
1739
+ "loss": 0.6099,
1740
+ "step": 245
1741
+ },
1742
+ {
1743
+ "epoch": 0.7443267776096822,
1744
+ "grad_norm": 1.7270983246178484,
1745
+ "learning_rate": 3.2119925446705824e-06,
1746
+ "loss": 0.6192,
1747
+ "step": 246
1748
+ },
1749
+ {
1750
+ "epoch": 0.7473524962178517,
1751
+ "grad_norm": 1.7719385527125833,
1752
+ "learning_rate": 3.140228880098074e-06,
1753
+ "loss": 0.619,
1754
+ "step": 247
1755
+ },
1756
+ {
1757
+ "epoch": 0.7503782148260212,
1758
+ "grad_norm": 1.8025142996003436,
1759
+ "learning_rate": 3.069126374543643e-06,
1760
+ "loss": 0.6098,
1761
+ "step": 248
1762
+ },
1763
+ {
1764
+ "epoch": 0.7534039334341907,
1765
+ "grad_norm": 1.6955751524948601,
1766
+ "learning_rate": 2.998691881014765e-06,
1767
+ "loss": 0.5871,
1768
+ "step": 249
1769
+ },
1770
+ {
1771
+ "epoch": 0.7534039334341907,
1772
+ "eval_loss": 0.6061348915100098,
1773
+ "eval_runtime": 98.7005,
1774
+ "eval_samples_per_second": 42.857,
1775
+ "eval_steps_per_second": 0.679,
1776
+ "step": 249
1777
+ },
1778
+ {
1779
+ "epoch": 0.75642965204236,
1780
+ "grad_norm": 1.7209631334708135,
1781
+ "learning_rate": 2.9289321881345257e-06,
1782
+ "loss": 0.6151,
1783
+ "step": 250
1784
+ },
1785
+ {
1786
+ "epoch": 0.7594553706505295,
1787
+ "grad_norm": 1.6344767691678022,
1788
+ "learning_rate": 2.859854019487318e-06,
1789
+ "loss": 0.6141,
1790
+ "step": 251
1791
+ },
1792
+ {
1793
+ "epoch": 0.762481089258699,
1794
+ "grad_norm": 1.869803493501466,
1795
+ "learning_rate": 2.791464032970812e-06,
1796
+ "loss": 0.6133,
1797
+ "step": 252
1798
+ },
1799
+ {
1800
+ "epoch": 0.7655068078668684,
1801
+ "grad_norm": 1.7100751850539497,
1802
+ "learning_rate": 2.723768820154251e-06,
1803
+ "loss": 0.6049,
1804
+ "step": 253
1805
+ },
1806
+ {
1807
+ "epoch": 0.7685325264750378,
1808
+ "grad_norm": 1.7234424171319058,
1809
+ "learning_rate": 2.656774905643147e-06,
1810
+ "loss": 0.599,
1811
+ "step": 254
1812
+ },
1813
+ {
1814
+ "epoch": 0.7715582450832073,
1815
+ "grad_norm": 1.6878132662269478,
1816
+ "learning_rate": 2.5904887464504115e-06,
1817
+ "loss": 0.6117,
1818
+ "step": 255
1819
+ },
1820
+ {
1821
+ "epoch": 0.7745839636913767,
1822
+ "grad_norm": 1.7093233245692183,
1823
+ "learning_rate": 2.5249167313740307e-06,
1824
+ "loss": 0.6198,
1825
+ "step": 256
1826
+ },
1827
+ {
1828
+ "epoch": 0.7776096822995462,
1829
+ "grad_norm": 1.912398596023919,
1830
+ "learning_rate": 2.4600651803813057e-06,
1831
+ "loss": 0.6191,
1832
+ "step": 257
1833
+ },
1834
+ {
1835
+ "epoch": 0.7806354009077155,
1836
+ "grad_norm": 1.789468500561203,
1837
+ "learning_rate": 2.395940343999691e-06,
1838
+ "loss": 0.6267,
1839
+ "step": 258
1840
+ },
1841
+ {
1842
+ "epoch": 0.783661119515885,
1843
+ "grad_norm": 2.075497889597922,
1844
+ "learning_rate": 2.332548402714385e-06,
1845
+ "loss": 0.6137,
1846
+ "step": 259
1847
+ },
1848
+ {
1849
+ "epoch": 0.7866868381240545,
1850
+ "grad_norm": 1.7618569590652433,
1851
+ "learning_rate": 2.26989546637263e-06,
1852
+ "loss": 0.6299,
1853
+ "step": 260
1854
+ },
1855
+ {
1856
+ "epoch": 0.789712556732224,
1857
+ "grad_norm": 2.2622555895319194,
1858
+ "learning_rate": 2.207987573594833e-06,
1859
+ "loss": 0.6329,
1860
+ "step": 261
1861
+ },
1862
+ {
1863
+ "epoch": 0.7927382753403933,
1864
+ "grad_norm": 1.7894533644126578,
1865
+ "learning_rate": 2.146830691192553e-06,
1866
+ "loss": 0.5947,
1867
+ "step": 262
1868
+ },
1869
+ {
1870
+ "epoch": 0.7957639939485628,
1871
+ "grad_norm": 2.6196101274062054,
1872
+ "learning_rate": 2.086430713593397e-06,
1873
+ "loss": 0.6329,
1874
+ "step": 263
1875
+ },
1876
+ {
1877
+ "epoch": 0.7987897125567323,
1878
+ "grad_norm": 1.9123478372822518,
1879
+ "learning_rate": 2.02679346227293e-06,
1880
+ "loss": 0.6241,
1881
+ "step": 264
1882
+ },
1883
+ {
1884
+ "epoch": 0.8018154311649016,
1885
+ "grad_norm": 1.7263173456003025,
1886
+ "learning_rate": 1.967924685193552e-06,
1887
+ "loss": 0.6017,
1888
+ "step": 265
1889
+ },
1890
+ {
1891
+ "epoch": 0.8048411497730711,
1892
+ "grad_norm": 1.8467856227350854,
1893
+ "learning_rate": 1.9098300562505266e-06,
1894
+ "loss": 0.647,
1895
+ "step": 266
1896
+ },
1897
+ {
1898
+ "epoch": 0.8078668683812406,
1899
+ "grad_norm": 1.6630727621724897,
1900
+ "learning_rate": 1.8525151747251058e-06,
1901
+ "loss": 0.5739,
1902
+ "step": 267
1903
+ },
1904
+ {
1905
+ "epoch": 0.81089258698941,
1906
+ "grad_norm": 1.7285904006134964,
1907
+ "learning_rate": 1.7959855647448642e-06,
1908
+ "loss": 0.6135,
1909
+ "step": 268
1910
+ },
1911
+ {
1912
+ "epoch": 0.8139183055975794,
1913
+ "grad_norm": 1.7269298338392545,
1914
+ "learning_rate": 1.7402466747512704e-06,
1915
+ "loss": 0.6082,
1916
+ "step": 269
1917
+ },
1918
+ {
1919
+ "epoch": 0.8169440242057489,
1920
+ "grad_norm": 1.7111586205844402,
1921
+ "learning_rate": 1.6853038769745466e-06,
1922
+ "loss": 0.6155,
1923
+ "step": 270
1924
+ },
1925
+ {
1926
+ "epoch": 0.8199697428139183,
1927
+ "grad_norm": 1.674017529420373,
1928
+ "learning_rate": 1.6311624669159064e-06,
1929
+ "loss": 0.598,
1930
+ "step": 271
1931
+ },
1932
+ {
1933
+ "epoch": 0.8229954614220878,
1934
+ "grad_norm": 1.7805315596940765,
1935
+ "learning_rate": 1.577827662837136e-06,
1936
+ "loss": 0.59,
1937
+ "step": 272
1938
+ },
1939
+ {
1940
+ "epoch": 0.8260211800302572,
1941
+ "grad_norm": 1.7422765737032646,
1942
+ "learning_rate": 1.5253046052576559e-06,
1943
+ "loss": 0.625,
1944
+ "step": 273
1945
+ },
1946
+ {
1947
+ "epoch": 0.8290468986384266,
1948
+ "grad_norm": 1.7242936339101036,
1949
+ "learning_rate": 1.4735983564590784e-06,
1950
+ "loss": 0.5911,
1951
+ "step": 274
1952
+ },
1953
+ {
1954
+ "epoch": 0.8320726172465961,
1955
+ "grad_norm": 1.7272091925820903,
1956
+ "learning_rate": 1.4227138999972801e-06,
1957
+ "loss": 0.6151,
1958
+ "step": 275
1959
+ },
1960
+ {
1961
+ "epoch": 0.8350983358547656,
1962
+ "grad_norm": 1.7250197363496207,
1963
+ "learning_rate": 1.3726561402220818e-06,
1964
+ "loss": 0.6215,
1965
+ "step": 276
1966
+ },
1967
+ {
1968
+ "epoch": 0.8381240544629349,
1969
+ "grad_norm": 1.6485340400230004,
1970
+ "learning_rate": 1.3234299018045615e-06,
1971
+ "loss": 0.5766,
1972
+ "step": 277
1973
+ },
1974
+ {
1975
+ "epoch": 0.8411497730711044,
1976
+ "grad_norm": 1.8678393181143942,
1977
+ "learning_rate": 1.2750399292720284e-06,
1978
+ "loss": 0.6286,
1979
+ "step": 278
1980
+ },
1981
+ {
1982
+ "epoch": 0.8441754916792739,
1983
+ "grad_norm": 1.6737731054502747,
1984
+ "learning_rate": 1.2274908865507595e-06,
1985
+ "loss": 0.588,
1986
+ "step": 279
1987
+ },
1988
+ {
1989
+ "epoch": 0.8472012102874432,
1990
+ "grad_norm": 1.7799288792849057,
1991
+ "learning_rate": 1.1807873565164507e-06,
1992
+ "loss": 0.5964,
1993
+ "step": 280
1994
+ },
1995
+ {
1996
+ "epoch": 0.8502269288956127,
1997
+ "grad_norm": 1.747150097774928,
1998
+ "learning_rate": 1.1349338405525368e-06,
1999
+ "loss": 0.6129,
2000
+ "step": 281
2001
+ },
2002
+ {
2003
+ "epoch": 0.8532526475037822,
2004
+ "grad_norm": 1.8392197300038893,
2005
+ "learning_rate": 1.0899347581163222e-06,
2006
+ "loss": 0.5917,
2007
+ "step": 282
2008
+ },
2009
+ {
2010
+ "epoch": 0.8562783661119516,
2011
+ "grad_norm": 2.033909314856892,
2012
+ "learning_rate": 1.045794446313031e-06,
2013
+ "loss": 0.6101,
2014
+ "step": 283
2015
+ },
2016
+ {
2017
+ "epoch": 0.859304084720121,
2018
+ "grad_norm": 1.8665347961123104,
2019
+ "learning_rate": 1.0025171594777872e-06,
2020
+ "loss": 0.6011,
2021
+ "step": 284
2022
+ },
2023
+ {
2024
+ "epoch": 0.8623298033282905,
2025
+ "grad_norm": 2.0264847769487515,
2026
+ "learning_rate": 9.601070687655667e-07,
2027
+ "loss": 0.6194,
2028
+ "step": 285
2029
+ },
2030
+ {
2031
+ "epoch": 0.8653555219364599,
2032
+ "grad_norm": 1.7878198612078795,
2033
+ "learning_rate": 9.185682617491865e-07,
2034
+ "loss": 0.6341,
2035
+ "step": 286
2036
+ },
2037
+ {
2038
+ "epoch": 0.8683812405446294,
2039
+ "grad_norm": 1.7213821794488278,
2040
+ "learning_rate": 8.779047420253239e-07,
2041
+ "loss": 0.6063,
2042
+ "step": 287
2043
+ },
2044
+ {
2045
+ "epoch": 0.8714069591527988,
2046
+ "grad_norm": 1.7529253914449372,
2047
+ "learning_rate": 8.381204288286415e-07,
2048
+ "loss": 0.6237,
2049
+ "step": 288
2050
+ },
2051
+ {
2052
+ "epoch": 0.8744326777609682,
2053
+ "grad_norm": 1.739234395593092,
2054
+ "learning_rate": 7.992191566540519e-07,
2055
+ "loss": 0.5866,
2056
+ "step": 289
2057
+ },
2058
+ {
2059
+ "epoch": 0.8774583963691377,
2060
+ "grad_norm": 1.7913343867359512,
2061
+ "learning_rate": 7.612046748871327e-07,
2062
+ "loss": 0.6245,
2063
+ "step": 290
2064
+ },
2065
+ {
2066
+ "epoch": 0.8804841149773072,
2067
+ "grad_norm": 1.7520857029251033,
2068
+ "learning_rate": 7.240806474427598e-07,
2069
+ "loss": 0.6088,
2070
+ "step": 291
2071
+ },
2072
+ {
2073
+ "epoch": 0.8835098335854765,
2074
+ "grad_norm": 1.8107427903884512,
2075
+ "learning_rate": 6.878506524119644e-07,
2076
+ "loss": 0.5917,
2077
+ "step": 292
2078
+ },
2079
+ {
2080
+ "epoch": 0.886535552193646,
2081
+ "grad_norm": 2.7889166090017823,
2082
+ "learning_rate": 6.525181817170756e-07,
2083
+ "loss": 0.6033,
2084
+ "step": 293
2085
+ },
2086
+ {
2087
+ "epoch": 0.8895612708018155,
2088
+ "grad_norm": 1.746936796122282,
2089
+ "learning_rate": 6.180866407751595e-07,
2090
+ "loss": 0.6504,
2091
+ "step": 294
2092
+ },
2093
+ {
2094
+ "epoch": 0.8925869894099848,
2095
+ "grad_norm": 1.7783458626371988,
2096
+ "learning_rate": 5.845593481697931e-07,
2097
+ "loss": 0.5897,
2098
+ "step": 295
2099
+ },
2100
+ {
2101
+ "epoch": 0.8956127080181543,
2102
+ "grad_norm": 1.7474879736728264,
2103
+ "learning_rate": 5.519395353312195e-07,
2104
+ "loss": 0.5996,
2105
+ "step": 296
2106
+ },
2107
+ {
2108
+ "epoch": 0.8986384266263238,
2109
+ "grad_norm": 1.738779253442242,
2110
+ "learning_rate": 5.20230346224897e-07,
2111
+ "loss": 0.6437,
2112
+ "step": 297
2113
+ },
2114
+ {
2115
+ "epoch": 0.9016641452344932,
2116
+ "grad_norm": 1.7492041972741343,
2117
+ "learning_rate": 4.894348370484648e-07,
2118
+ "loss": 0.6112,
2119
+ "step": 298
2120
+ },
2121
+ {
2122
+ "epoch": 0.9046898638426626,
2123
+ "grad_norm": 1.7353938238847695,
2124
+ "learning_rate": 4.5955597593719593e-07,
2125
+ "loss": 0.6131,
2126
+ "step": 299
2127
+ },
2128
+ {
2129
+ "epoch": 0.9077155824508321,
2130
+ "grad_norm": 1.6794144202567616,
2131
+ "learning_rate": 4.305966426779118e-07,
2132
+ "loss": 0.6217,
2133
+ "step": 300
2134
+ },
2135
+ {
2136
+ "epoch": 0.9107413010590015,
2137
+ "grad_norm": 1.6476681925670142,
2138
+ "learning_rate": 4.025596284314259e-07,
2139
+ "loss": 0.5749,
2140
+ "step": 301
2141
+ },
2142
+ {
2143
+ "epoch": 0.913767019667171,
2144
+ "grad_norm": 1.779793559081367,
2145
+ "learning_rate": 3.7544763546352834e-07,
2146
+ "loss": 0.5983,
2147
+ "step": 302
2148
+ },
2149
+ {
2150
+ "epoch": 0.9167927382753404,
2151
+ "grad_norm": 1.7898354786144914,
2152
+ "learning_rate": 3.492632768845261e-07,
2153
+ "loss": 0.6224,
2154
+ "step": 303
2155
+ },
2156
+ {
2157
+ "epoch": 0.9198184568835098,
2158
+ "grad_norm": 1.6850170985387,
2159
+ "learning_rate": 3.2400907639740243e-07,
2160
+ "loss": 0.5852,
2161
+ "step": 304
2162
+ },
2163
+ {
2164
+ "epoch": 0.9228441754916793,
2165
+ "grad_norm": 1.7003389450383504,
2166
+ "learning_rate": 2.996874680545603e-07,
2167
+ "loss": 0.604,
2168
+ "step": 305
2169
+ },
2170
+ {
2171
+ "epoch": 0.9258698940998488,
2172
+ "grad_norm": 1.8138684808267453,
2173
+ "learning_rate": 2.7630079602323447e-07,
2174
+ "loss": 0.606,
2175
+ "step": 306
2176
+ },
2177
+ {
2178
+ "epoch": 0.9288956127080181,
2179
+ "grad_norm": 1.631401355733875,
2180
+ "learning_rate": 2.5385131435955e-07,
2181
+ "loss": 0.5893,
2182
+ "step": 307
2183
+ },
2184
+ {
2185
+ "epoch": 0.9319213313161876,
2186
+ "grad_norm": 1.6639411975053835,
2187
+ "learning_rate": 2.3234118679127615e-07,
2188
+ "loss": 0.5991,
2189
+ "step": 308
2190
+ },
2191
+ {
2192
+ "epoch": 0.9349470499243571,
2193
+ "grad_norm": 1.8650796449881923,
2194
+ "learning_rate": 2.117724865092774e-07,
2195
+ "loss": 0.6116,
2196
+ "step": 309
2197
+ },
2198
+ {
2199
+ "epoch": 0.9379727685325264,
2200
+ "grad_norm": 1.8481908824589233,
2201
+ "learning_rate": 1.921471959676957e-07,
2202
+ "loss": 0.6319,
2203
+ "step": 310
2204
+ },
2205
+ {
2206
+ "epoch": 0.9409984871406959,
2207
+ "grad_norm": 1.8047500663011353,
2208
+ "learning_rate": 1.734672066928822e-07,
2209
+ "loss": 0.5994,
2210
+ "step": 311
2211
+ },
2212
+ {
2213
+ "epoch": 0.9440242057488654,
2214
+ "grad_norm": 1.8464143580018735,
2215
+ "learning_rate": 1.5573431910108404e-07,
2216
+ "loss": 0.5968,
2217
+ "step": 312
2218
+ },
2219
+ {
2220
+ "epoch": 0.9470499243570348,
2221
+ "grad_norm": 1.5943324787903639,
2222
+ "learning_rate": 1.3895024232491338e-07,
2223
+ "loss": 0.5795,
2224
+ "step": 313
2225
+ },
2226
+ {
2227
+ "epoch": 0.9500756429652042,
2228
+ "grad_norm": 2.1710266033161147,
2229
+ "learning_rate": 1.231165940486234e-07,
2230
+ "loss": 0.6345,
2231
+ "step": 314
2232
+ },
2233
+ {
2234
+ "epoch": 0.9531013615733737,
2235
+ "grad_norm": 1.716465292208872,
2236
+ "learning_rate": 1.0823490035218986e-07,
2237
+ "loss": 0.6059,
2238
+ "step": 315
2239
+ },
2240
+ {
2241
+ "epoch": 0.9561270801815431,
2242
+ "grad_norm": 1.7863152621038756,
2243
+ "learning_rate": 9.43065955642275e-08,
2244
+ "loss": 0.6023,
2245
+ "step": 316
2246
+ },
2247
+ {
2248
+ "epoch": 0.9591527987897126,
2249
+ "grad_norm": 1.8244440730769216,
2250
+ "learning_rate": 8.133302212373961e-08,
2251
+ "loss": 0.6157,
2252
+ "step": 317
2253
+ },
2254
+ {
2255
+ "epoch": 0.962178517397882,
2256
+ "grad_norm": 1.6881545392698598,
2257
+ "learning_rate": 6.931543045073708e-08,
2258
+ "loss": 0.5915,
2259
+ "step": 318
2260
+ },
2261
+ {
2262
+ "epoch": 0.9652042360060514,
2263
+ "grad_norm": 1.7445865304504102,
2264
+ "learning_rate": 5.8254978825718065e-08,
2265
+ "loss": 0.5973,
2266
+ "step": 319
2267
+ },
2268
+ {
2269
+ "epoch": 0.9682299546142209,
2270
+ "grad_norm": 1.7698582738520474,
2271
+ "learning_rate": 4.815273327803183e-08,
2272
+ "loss": 0.5974,
2273
+ "step": 320
2274
+ },
2275
+ {
2276
+ "epoch": 0.9712556732223904,
2277
+ "grad_norm": 1.6641052228223427,
2278
+ "learning_rate": 3.900966748312862e-08,
2279
+ "loss": 0.618,
2280
+ "step": 321
2281
+ },
2282
+ {
2283
+ "epoch": 0.9742813918305597,
2284
+ "grad_norm": 1.743456317223566,
2285
+ "learning_rate": 3.082666266872036e-08,
2286
+ "loss": 0.6038,
2287
+ "step": 322
2288
+ },
2289
+ {
2290
+ "epoch": 0.9773071104387292,
2291
+ "grad_norm": 1.709368453532382,
2292
+ "learning_rate": 2.3604507529843e-08,
2293
+ "loss": 0.6199,
2294
+ "step": 323
2295
+ },
2296
+ {
2297
+ "epoch": 0.9803328290468987,
2298
+ "grad_norm": 1.770311456826043,
2299
+ "learning_rate": 1.7343898152841765e-08,
2300
+ "loss": 0.593,
2301
+ "step": 324
2302
+ },
2303
+ {
2304
+ "epoch": 0.983358547655068,
2305
+ "grad_norm": 1.6607806329342598,
2306
+ "learning_rate": 1.2045437948275952e-08,
2307
+ "loss": 0.5876,
2308
+ "step": 325
2309
+ },
2310
+ {
2311
+ "epoch": 0.9863842662632375,
2312
+ "grad_norm": 1.6047392651075518,
2313
+ "learning_rate": 7.70963759277099e-09,
2314
+ "loss": 0.5873,
2315
+ "step": 326
2316
+ },
2317
+ {
2318
+ "epoch": 0.989409984871407,
2319
+ "grad_norm": 1.7363455551140259,
2320
+ "learning_rate": 4.336914979787832e-09,
2321
+ "loss": 0.625,
2322
+ "step": 327
2323
+ },
2324
+ {
2325
+ "epoch": 0.9924357034795764,
2326
+ "grad_norm": 1.7014506767679876,
2327
+ "learning_rate": 1.9275951793518154e-09,
2328
+ "loss": 0.611,
2329
+ "step": 328
2330
+ },
2331
+ {
2332
+ "epoch": 0.9954614220877458,
2333
+ "grad_norm": 1.7378760213357103,
2334
+ "learning_rate": 4.819104067199653e-10,
2335
+ "loss": 0.6026,
2336
+ "step": 329
2337
+ },
2338
+ {
2339
+ "epoch": 0.9984871406959153,
2340
+ "grad_norm": 1.6781131162092493,
2341
+ "learning_rate": 0.0,
2342
+ "loss": 0.6165,
2343
+ "step": 330
2344
+ },
2345
+ {
2346
+ "epoch": 0.9984871406959153,
2347
+ "step": 330,
2348
+ "total_flos": 552343531683840.0,
2349
+ "train_loss": 0.660008016499606,
2350
+ "train_runtime": 4533.1032,
2351
+ "train_samples_per_second": 9.332,
2352
+ "train_steps_per_second": 0.073
2353
+ }
2354
+ ],
2355
+ "logging_steps": 1.0,
2356
+ "max_steps": 330,
2357
+ "num_input_tokens_seen": 0,
2358
+ "num_train_epochs": 1,
2359
+ "save_steps": 83,
2360
+ "stateful_callbacks": {
2361
+ "TrainerControl": {
2362
+ "args": {
2363
+ "should_epoch_stop": false,
2364
+ "should_evaluate": false,
2365
+ "should_log": false,
2366
+ "should_save": true,
2367
+ "should_training_stop": true
2368
+ },
2369
+ "attributes": {}
2370
+ }
2371
+ },
2372
+ "total_flos": 552343531683840.0,
2373
+ "train_batch_size": 1,
2374
+ "trial_name": null,
2375
+ "trial_params": null
2376
+ }
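
The file above follows the standard transformers Trainer state layout: "log_history" holds one entry per logged step (with "epoch", "grad_norm", "learning_rate", "loss", "step") plus a single closing summary entry that reports "train_loss" instead of "loss". Below is a minimal sketch, not part of this commit, of how the uploaded state can be inspected after download; the local path "trainer_state.json" is an assumption, and only the Python standard library is used.

    import json

    # Load the trainer state shown in the diff above; the path is an
    # assumption about where the downloaded file was saved locally.
    with open("trainer_state.json") as f:
        state = json.load(f)

    # Per-step entries carry a "loss" key; the final summary entry logs
    # "train_loss" instead, so filtering on "loss" separates the two.
    steps = [e for e in state["log_history"] if "loss" in e]
    summary = [e for e in state["log_history"] if "train_loss" in e][-1]

    first, last = steps[0], steps[-1]
    print(f"logged steps: {len(steps)} (step {first['step']} to {last['step']})")
    print(f"loss: {first['loss']:.4f} -> {last['loss']:.4f}")
    print(f"mean train loss: {summary['train_loss']:.4f}")
    print(f"runtime: {summary['train_runtime']:.0f}s, "
          f"{summary['train_samples_per_second']:.2f} samples/s")

Filtering on the "loss" key is what keeps the 330 per-step entries apart from the summary record, so the same snippet works on any trainer_state.json produced by this Trainer version.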