furmaniak committed
Commit 600ec22 · verified · 1 Parent(s): 1dd987f

Training in progress, step 100

config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "_name_or_path": "microsoft/phi-4",
+   "architectures": [
+     "Phi3ForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "auto_map": {},
+   "bos_token_id": 100257,
+   "embd_pdrop": 0.0,
+   "eos_token_id": 100265,
+   "hidden_act": "silu",
+   "hidden_size": 5120,
+   "initializer_range": 0.02,
+   "intermediate_size": 17920,
+   "max_position_embeddings": 16384,
+   "model_type": "phi3",
+   "num_attention_heads": 40,
+   "num_hidden_layers": 40,
+   "num_key_value_heads": 10,
+   "original_max_position_embeddings": 16384,
+   "pad_token_id": 100349,
+   "resid_pdrop": 0.0,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 250000,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.48.2",
+   "use_cache": false,
+   "vocab_size": 100352
+ }
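
This config describes a Phi-3-style decoder: 40 layers, hidden size 5120, 40 attention heads sharing 10 KV heads (grouped-query attention, 4 query heads per KV head), 16K context, and a 100,352-token vocabulary. A minimal sketch of inspecting it with transformers, assuming a local checkout of this repository (the "./checkpoint" path is illustrative, not part of this commit):

# Minimal sketch: load and inspect this checkpoint's config.
# "./checkpoint" is an assumed local path, not taken from the commit.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./checkpoint")
print(config.model_type)            # phi3
print(config.num_attention_heads)   # 40 query heads
print(config.num_key_value_heads)   # 10 KV heads -> GQA, 4 query heads per group
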
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d755130dabad5db45d6fefe0e01aa363c4cea91fc327038b9c577c55ca073f28
+ size 4933656472
model-00002-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6fbe24d34af92079619e927f8e8d37f83969ae6c3080cc871c3efd7b18bca85b
+ size 4954690712
model-00003-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d1f07bfb996c2ebc7c7a22bd9c8dd288fef204a9891bb7eb3fb67a57ddd2a63b
+ size 4902241352
model-00004-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:15614d1e341c85b63eaa7b7969b4d2f42fb7f9ff61f1d496a9fdddd7790083c5
+ size 4771169120
model-00005-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0dd616b44fd4b996c4558123f4cb67fa00e653d185fa91c73918496359ff1a16
+ size 4771169120
model-00006-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a99fc5a809837c073f28ede0aaeeb305bfbc2e8704f68055347ecd091c071e96
+ size 4986116216
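
Each shard above is committed as a Git LFS pointer: the repository stores only the LFS spec version, the payload's SHA-256, and its size in bytes (the six shards together come to roughly the 29.3 GB total recorded in the index below). A minimal sketch of verifying a downloaded shard against its pointer; the filename matches this commit, but the local path and usage are assumptions:

# Minimal sketch: check a downloaded shard against its LFS pointer digest.
import hashlib

# Expected digest recorded in the pointer for model-00001-of-00006.safetensors.
EXPECTED = "d755130dabad5db45d6fefe0e01aa363c4cea91fc327038b9c577c55ca073f28"

h = hashlib.sha256()
with open("model-00001-of-00006.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        h.update(chunk)
assert h.hexdigest() == EXPECTED, "shard is corrupt or incomplete"
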
model.safetensors.index.json ADDED
@@ -0,0 +1,250 @@
+ {
+   "metadata": {
+     "total_size": 29319014400
+   },
+   "weight_map": {
+     "lm_head.weight": "model-00006-of-00006.safetensors",
+     "model.embed_tokens.weight": "model-00001-of-00006.safetensors",
+     "model.layers.0.input_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.0.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.0.mlp.gate_up_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.0.self_attn.qkv_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.1.input_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.1.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.1.mlp.gate_up_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.1.self_attn.qkv_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.10.input_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.10.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.10.mlp.gate_up_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.10.self_attn.qkv_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.11.input_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.11.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.11.mlp.gate_up_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.11.self_attn.qkv_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.12.input_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.12.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.12.mlp.gate_up_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.12.self_attn.qkv_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.13.input_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.13.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.13.mlp.gate_up_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.13.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.13.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.13.self_attn.qkv_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.14.input_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.14.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.14.mlp.gate_up_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.14.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.14.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.14.self_attn.qkv_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.15.input_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.15.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.15.mlp.gate_up_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.15.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.15.self_attn.qkv_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.16.input_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.16.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.16.mlp.gate_up_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.16.self_attn.qkv_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.17.input_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.17.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.17.mlp.gate_up_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.17.self_attn.qkv_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.18.input_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.18.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.18.mlp.gate_up_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.18.self_attn.qkv_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.19.input_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.19.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.19.mlp.gate_up_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+     "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.19.self_attn.qkv_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.2.input_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.2.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.2.mlp.gate_up_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.2.self_attn.qkv_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.20.input_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.20.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.20.mlp.gate_up_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.20.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.20.self_attn.qkv_proj.weight": "model-00003-of-00006.safetensors",
+     "model.layers.21.input_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.21.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.21.mlp.gate_up_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.21.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.21.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.21.self_attn.qkv_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.22.input_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.22.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.22.mlp.gate_up_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.22.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.22.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.22.self_attn.qkv_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.23.input_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.23.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.23.mlp.gate_up_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.23.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.23.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.23.self_attn.qkv_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.24.input_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.24.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.24.mlp.gate_up_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.24.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.24.self_attn.qkv_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.25.input_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.25.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.25.mlp.gate_up_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.25.self_attn.qkv_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.26.input_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.26.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.26.mlp.gate_up_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+     "model.layers.26.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.26.self_attn.qkv_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.27.input_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.27.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.27.mlp.gate_up_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.27.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.27.self_attn.qkv_proj.weight": "model-00004-of-00006.safetensors",
+     "model.layers.28.input_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.28.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.28.mlp.gate_up_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.28.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.28.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.28.self_attn.qkv_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.29.input_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.29.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.29.mlp.gate_up_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.29.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.29.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.29.self_attn.qkv_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.3.input_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.3.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.3.mlp.gate_up_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.3.self_attn.qkv_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.30.input_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.30.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.30.mlp.gate_up_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.30.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.30.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.30.self_attn.qkv_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.31.input_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.31.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.31.mlp.gate_up_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.31.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.31.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.31.self_attn.qkv_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.32.input_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.32.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.32.mlp.gate_up_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.32.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.32.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.32.self_attn.qkv_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.33.input_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.33.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.33.mlp.gate_up_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+     "model.layers.33.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.33.self_attn.qkv_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.34.input_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.34.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.34.mlp.gate_up_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.34.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.34.self_attn.qkv_proj.weight": "model-00005-of-00006.safetensors",
+     "model.layers.35.input_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.35.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.35.mlp.gate_up_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.35.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.35.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.35.self_attn.qkv_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.36.input_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.36.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.36.mlp.gate_up_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.36.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.36.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.36.self_attn.qkv_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.37.input_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.37.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.37.mlp.gate_up_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.37.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.37.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.37.self_attn.qkv_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.38.input_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.38.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.38.mlp.gate_up_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.38.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.38.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.38.self_attn.qkv_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.39.input_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.39.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.39.mlp.gate_up_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.39.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+     "model.layers.39.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.39.self_attn.qkv_proj.weight": "model-00006-of-00006.safetensors",
+     "model.layers.4.input_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.4.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.4.mlp.gate_up_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+     "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.4.self_attn.qkv_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.5.input_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.5.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.5.mlp.gate_up_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.5.self_attn.qkv_proj.weight": "model-00001-of-00006.safetensors",
+     "model.layers.6.input_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.6.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.6.mlp.gate_up_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.6.self_attn.qkv_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.7.input_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.7.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.7.mlp.gate_up_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.7.self_attn.qkv_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.8.input_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.8.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.8.mlp.gate_up_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.8.self_attn.qkv_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.9.input_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.9.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.9.mlp.gate_up_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+     "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+     "model.layers.9.self_attn.qkv_proj.weight": "model-00002-of-00006.safetensors",
+     "model.norm.weight": "model-00006-of-00006.safetensors"
+   }
+ }
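
The index is what lets loaders open only the shard that holds a given tensor: "metadata.total_size" records the total tensor bytes (~29.3 GB) and "weight_map" maps each parameter name to its shard. A few layers straddle shard boundaries; layer 20's attention projections, for example, live in shard 3 while the rest of that layer sits in shard 4. A minimal sketch of resolving one tensor through the index with the safetensors library, assuming the files sit in the current directory:

# Minimal sketch: look up a tensor's shard and read it with safetensors.
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.20.self_attn.o_proj.weight"
shard = index["weight_map"][name]   # -> "model-00003-of-00006.safetensors"
with safe_open(shard, framework="pt") as handle:
    tensor = handle.get_tensor(name)
print(shard, tuple(tensor.shape))
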
special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "bos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": true,
+     "normalized": false,
+     "rstrip": true,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|im_end|>",
+     "lstrip": true,
+     "normalized": false,
+     "rstrip": true,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|dummy_85|>",
+     "lstrip": true,
+     "normalized": false,
+     "rstrip": true,
+     "single_word": false
+   },
+   "unk_token": "<|endoftext|>"
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,784 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "100256": {"content": "<|dummy_0|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100257": {"content": "<|endoftext|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100258": {"content": "<|fim_prefix|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100259": {"content": "<|fim_middle|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100260": {"content": "<|fim_suffix|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100261": {"content": "<|dummy_1|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100262": {"content": "<|dummy_2|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100263": {"content": "<|dummy_3|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100264": {"content": "<|im_start|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100265": {"content": "<|im_end|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100266": {"content": "<|im_sep|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100267": {"content": "<|dummy_4|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100268": {"content": "<|dummy_5|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100269": {"content": "<|dummy_6|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100270": {"content": "<|dummy_7|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100271": {"content": "<|dummy_8|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100272": {"content": "<|dummy_9|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100273": {"content": "<|dummy_10|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100274": {"content": "<|dummy_11|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100275": {"content": "<|dummy_12|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100276": {"content": "<|endofprompt|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100277": {"content": "<|dummy_13|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100278": {"content": "<|dummy_14|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100279": {"content": "<|dummy_15|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100280": {"content": "<|dummy_16|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100281": {"content": "<|dummy_17|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100282": {"content": "<|dummy_18|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100283": {"content": "<|dummy_19|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100284": {"content": "<|dummy_20|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100285": {"content": "<|dummy_21|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100286": {"content": "<|dummy_22|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100287": {"content": "<|dummy_23|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100288": {"content": "<|dummy_24|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100289": {"content": "<|dummy_25|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100290": {"content": "<|dummy_26|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100291": {"content": "<|dummy_27|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100292": {"content": "<|dummy_28|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100293": {"content": "<|dummy_29|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100294": {"content": "<|dummy_30|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100295": {"content": "<|dummy_31|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100296": {"content": "<|dummy_32|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100297": {"content": "<|dummy_33|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100298": {"content": "<|dummy_34|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100299": {"content": "<|dummy_35|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100300": {"content": "<|dummy_36|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100301": {"content": "<|dummy_37|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100302": {"content": "<|dummy_38|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100303": {"content": "<|dummy_39|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100304": {"content": "<|dummy_40|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100305": {"content": "<|dummy_41|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100306": {"content": "<|dummy_42|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100307": {"content": "<|dummy_43|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100308": {"content": "<|dummy_44|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100309": {"content": "<|dummy_45|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100310": {"content": "<|dummy_46|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100311": {"content": "<|dummy_47|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100312": {"content": "<|dummy_48|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100313": {"content": "<|dummy_49|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100314": {"content": "<|dummy_50|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100315": {"content": "<|dummy_51|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100316": {"content": "<|dummy_52|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100317": {"content": "<|dummy_53|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100318": {"content": "<|dummy_54|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100319": {"content": "<|dummy_55|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100320": {"content": "<|dummy_56|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100321": {"content": "<|dummy_57|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100322": {"content": "<|dummy_58|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100323": {"content": "<|dummy_59|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100324": {"content": "<|dummy_60|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100325": {"content": "<|dummy_61|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100326": {"content": "<|dummy_62|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100327": {"content": "<|dummy_63|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100328": {"content": "<|dummy_64|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100329": {"content": "<|dummy_65|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100330": {"content": "<|dummy_66|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100331": {"content": "<|dummy_67|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100332": {"content": "<|dummy_68|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100333": {"content": "<|dummy_69|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100334": {"content": "<|dummy_70|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100335": {"content": "<|dummy_71|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100336": {"content": "<|dummy_72|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100337": {"content": "<|dummy_73|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100338": {"content": "<|dummy_74|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100339": {"content": "<|dummy_75|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100340": {"content": "<|dummy_76|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100341": {"content": "<|dummy_77|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100342": {"content": "<|dummy_78|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100343": {"content": "<|dummy_79|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100344": {"content": "<|dummy_80|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100345": {"content": "<|dummy_81|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100346": {"content": "<|dummy_82|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100347": {"content": "<|dummy_83|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100348": {"content": "<|dummy_84|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100349": {"content": "<|dummy_85|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100350": {"content": "<|dummy_86|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true},
+     "100351": {"content": "<|dummy_87|>", "lstrip": true, "normalized": false, "rstrip": true, "single_word": false, "special": true}
+   },
+   "bos_token": "<|endoftext|>",
+   "chat_template": "{% for message in messages %}{% if (message['role'] == 'system') %}{{'<|im_start|>system<|im_sep|>' + message['content'] + '<|im_end|>'}}{% elif (message['role'] == 'user') %}{{'<|im_start|>user<|im_sep|>' + message['content'] + '<|im_end|>'}}{% elif (message['role'] == 'assistant') %}{{'<|im_start|>assistant<|im_sep|>' + message['content'] + '<|im_end|>'}}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant<|im_sep|>' }}{% endif %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|im_end|>",
+   "extra_special_tokens": {},
+   "model_max_length": 8192,
+   "pad_token": "<|dummy_85|>",
+   "padding_side": "right",
+   "split_special_tokens": false,
+   "tokenizer_class": "GPT2Tokenizer",
+   "unk_token": "<|endoftext|>"
+ }
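
Two details worth noting here: "model_max_length" is 8192 even though the model config above allows 16384 positions, and the "chat_template" is a Jinja template that renders every turn as <|im_start|>{role}<|im_sep|>{content}<|im_end|> with no separators between turns. A minimal sketch of rendering it via the tokenizer, again assuming a local checkout (the "./checkpoint" path is illustrative):

# Minimal sketch: render the chat template from tokenizer_config.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]
print(tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True))
# -> <|im_start|>system<|im_sep|>You are a helpful assistant.<|im_end|><|im_start|>user<|im_sep|>Hello!<|im_end|><|im_start|>assistant<|im_sep|>
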
trainer_log.jsonl ADDED
@@ -0,0 +1,102 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {"current_steps": 1, "total_steps": 117, "loss": 1.8884, "lr": 1.6666666666666667e-06, "epoch": 0.008547008547008548, "percentage": 0.85, "elapsed_time": "0:03:29", "remaining_time": "6:45:06"}
2
+ {"current_steps": 2, "total_steps": 117, "loss": 1.8929, "lr": 3.3333333333333333e-06, "epoch": 0.017094017094017096, "percentage": 1.71, "elapsed_time": "0:06:43", "remaining_time": "6:26:44"}
3
+ {"current_steps": 3, "total_steps": 117, "loss": 1.9025, "lr": 5e-06, "epoch": 0.02564102564102564, "percentage": 2.56, "elapsed_time": "0:09:59", "remaining_time": "6:19:27"}
4
+ {"current_steps": 4, "total_steps": 117, "loss": 1.8795, "lr": 6.666666666666667e-06, "epoch": 0.03418803418803419, "percentage": 3.42, "elapsed_time": "0:13:13", "remaining_time": "6:13:42"}
5
+ {"current_steps": 5, "total_steps": 117, "loss": 1.8705, "lr": 8.333333333333334e-06, "epoch": 0.042735042735042736, "percentage": 4.27, "elapsed_time": "0:16:25", "remaining_time": "6:07:48"}
6
+ {"current_steps": 6, "total_steps": 117, "loss": 1.8572, "lr": 1e-05, "epoch": 0.05128205128205128, "percentage": 5.13, "elapsed_time": "0:19:36", "remaining_time": "6:02:43"}
7
+ {"current_steps": 7, "total_steps": 117, "loss": 1.8696, "lr": 9.99799753559161e-06, "epoch": 0.05982905982905983, "percentage": 5.98, "elapsed_time": "0:22:48", "remaining_time": "5:58:17"}
8
+ {"current_steps": 8, "total_steps": 117, "loss": 1.8535, "lr": 9.991991746311916e-06, "epoch": 0.06837606837606838, "percentage": 6.84, "elapsed_time": "0:25:58", "remaining_time": "5:53:58"}
9
+ {"current_steps": 9, "total_steps": 117, "loss": 1.8623, "lr": 9.981987442712634e-06, "epoch": 0.07692307692307693, "percentage": 7.69, "elapsed_time": "0:29:09", "remaining_time": "5:49:49"}
10
+ {"current_steps": 10, "total_steps": 117, "loss": 1.8575, "lr": 9.967992638098517e-06, "epoch": 0.08547008547008547, "percentage": 8.55, "elapsed_time": "0:32:18", "remaining_time": "5:45:44"}
11
+ {"current_steps": 11, "total_steps": 117, "loss": 1.8533, "lr": 9.950018542108818e-06, "epoch": 0.09401709401709402, "percentage": 9.4, "elapsed_time": "0:35:27", "remaining_time": "5:41:44"}
12
+ {"current_steps": 12, "total_steps": 117, "loss": 1.8447, "lr": 9.928079551738542e-06, "epoch": 0.10256410256410256, "percentage": 10.26, "elapsed_time": "0:38:36", "remaining_time": "5:37:46"}
13
+ {"current_steps": 13, "total_steps": 117, "loss": 1.8458, "lr": 9.902193239806634e-06, "epoch": 0.1111111111111111, "percentage": 11.11, "elapsed_time": "0:41:47", "remaining_time": "5:34:16"}
14
+ {"current_steps": 14, "total_steps": 117, "loss": 1.8447, "lr": 9.872380340880416e-06, "epoch": 0.11965811965811966, "percentage": 11.97, "elapsed_time": "0:44:57", "remaining_time": "5:30:42"}
15
+ {"current_steps": 15, "total_steps": 117, "loss": 1.8474, "lr": 9.838664734667496e-06, "epoch": 0.1282051282051282, "percentage": 12.82, "elapsed_time": "0:48:05", "remaining_time": "5:27:01"}
16
+ {"current_steps": 16, "total_steps": 117, "loss": 1.8424, "lr": 9.801073426888447e-06, "epoch": 0.13675213675213677, "percentage": 13.68, "elapsed_time": "0:51:14", "remaining_time": "5:23:30"}
17
+ {"current_steps": 17, "total_steps": 117, "loss": 1.8274, "lr": 9.759636527645633e-06, "epoch": 0.1452991452991453, "percentage": 14.53, "elapsed_time": "0:54:23", "remaining_time": "5:19:58"}
18
+ {"current_steps": 18, "total_steps": 117, "loss": 1.8262, "lr": 9.714387227305422e-06, "epoch": 0.15384615384615385, "percentage": 15.38, "elapsed_time": "0:57:34", "remaining_time": "5:16:41"}
19
+ {"current_steps": 19, "total_steps": 117, "loss": 1.8331, "lr": 9.665361769913187e-06, "epoch": 0.1623931623931624, "percentage": 16.24, "elapsed_time": "1:00:44", "remaining_time": "5:13:17"}
20
+ {"current_steps": 20, "total_steps": 117, "loss": 1.8278, "lr": 9.612599424162344e-06, "epoch": 0.17094017094017094, "percentage": 17.09, "elapsed_time": "1:03:55", "remaining_time": "5:10:00"}
21
+ {"current_steps": 21, "total_steps": 117, "loss": 1.8238, "lr": 9.55614245194068e-06, "epoch": 0.1794871794871795, "percentage": 17.95, "elapsed_time": "1:07:03", "remaining_time": "5:06:31"}
22
+ {"current_steps": 22, "total_steps": 117, "loss": 1.8282, "lr": 9.496036074479184e-06, "epoch": 0.18803418803418803, "percentage": 18.8, "elapsed_time": "1:10:13", "remaining_time": "5:03:13"}
23
+ {"current_steps": 23, "total_steps": 117, "loss": 1.825, "lr": 9.432328436130493e-06, "epoch": 0.19658119658119658, "percentage": 19.66, "elapsed_time": "1:13:22", "remaining_time": "4:59:54"}
24
+ {"current_steps": 24, "total_steps": 117, "loss": 1.8165, "lr": 9.365070565805941e-06, "epoch": 0.20512820512820512, "percentage": 20.51, "elapsed_time": "1:16:34", "remaining_time": "4:56:43"}
25
+ {"current_steps": 25, "total_steps": 117, "loss": 1.8194, "lr": 9.294316336102132e-06, "epoch": 0.21367521367521367, "percentage": 21.37, "elapsed_time": "1:19:43", "remaining_time": "4:53:23"}
26
+ {"current_steps": 26, "total_steps": 117, "loss": 1.822, "lr": 9.220122420149753e-06, "epoch": 0.2222222222222222, "percentage": 22.22, "elapsed_time": "1:22:54", "remaining_time": "4:50:09"}
27
+ {"current_steps": 27, "total_steps": 117, "loss": 1.8218, "lr": 9.142548246219212e-06, "epoch": 0.23076923076923078, "percentage": 23.08, "elapsed_time": "1:26:05", "remaining_time": "4:46:59"}
28
+ {"current_steps": 28, "total_steps": 117, "loss": 1.8245, "lr": 9.06165595011943e-06, "epoch": 0.23931623931623933, "percentage": 23.93, "elapsed_time": "1:29:16", "remaining_time": "4:43:46"}
29
+ {"current_steps": 29, "total_steps": 117, "loss": 1.8087, "lr": 8.97751032542795e-06, "epoch": 0.24786324786324787, "percentage": 24.79, "elapsed_time": "1:32:26", "remaining_time": "4:40:31"}
30
+ {"current_steps": 30, "total_steps": 117, "loss": 1.8097, "lr": 8.890178771592198e-06, "epoch": 0.2564102564102564, "percentage": 25.64, "elapsed_time": "1:35:37", "remaining_time": "4:37:19"}
31
+ {"current_steps": 31, "total_steps": 117, "loss": 1.8207, "lr": 8.799731239943488e-06, "epoch": 0.26495726495726496, "percentage": 26.5, "elapsed_time": "1:38:48", "remaining_time": "4:34:05"}
32
+ {"current_steps": 32, "total_steps": 117, "loss": 1.8124, "lr": 8.706240177667003e-06, "epoch": 0.27350427350427353, "percentage": 27.35, "elapsed_time": "1:41:58", "remaining_time": "4:30:51"}
33
+ {"current_steps": 33, "total_steps": 117, "loss": 1.8184, "lr": 8.609780469772623e-06, "epoch": 0.28205128205128205, "percentage": 28.21, "elapsed_time": "1:45:07", "remaining_time": "4:27:34"}
34
+ {"current_steps": 34, "total_steps": 117, "loss": 1.819, "lr": 8.510429379113114e-06, "epoch": 0.2905982905982906, "percentage": 29.06, "elapsed_time": "1:48:15", "remaining_time": "4:24:17"}
35
+ {"current_steps": 35, "total_steps": 117, "loss": 1.8153, "lr": 8.408266484497664e-06, "epoch": 0.29914529914529914, "percentage": 29.91, "elapsed_time": "1:51:23", "remaining_time": "4:20:58"}
36
+ {"current_steps": 36, "total_steps": 117, "loss": 1.8141, "lr": 8.303373616950408e-06, "epoch": 0.3076923076923077, "percentage": 30.77, "elapsed_time": "1:54:33", "remaining_time": "4:17:44"}
37
+ {"current_steps": 37, "total_steps": 117, "loss": 1.8072, "lr": 8.195834794164925e-06, "epoch": 0.3162393162393162, "percentage": 31.62, "elapsed_time": "1:57:40", "remaining_time": "4:14:25"}
38
+ {"current_steps": 38, "total_steps": 117, "loss": 1.8171, "lr": 8.085736153207277e-06, "epoch": 0.3247863247863248, "percentage": 32.48, "elapsed_time": "2:00:51", "remaining_time": "4:11:15"}
39
+ {"current_steps": 39, "total_steps": 117, "loss": 1.8192, "lr": 7.973165881521435e-06, "epoch": 0.3333333333333333, "percentage": 33.33, "elapsed_time": "2:04:00", "remaining_time": "4:08:00"}
40
+ {"current_steps": 40, "total_steps": 117, "loss": 1.8069, "lr": 7.858214146292394e-06, "epoch": 0.3418803418803419, "percentage": 34.19, "elapsed_time": "2:07:09", "remaining_time": "4:04:47"}
41
+ {"current_steps": 41, "total_steps": 117, "loss": 1.8093, "lr": 7.74097302222355e-06, "epoch": 0.3504273504273504, "percentage": 35.04, "elapsed_time": "2:10:19", "remaining_time": "4:01:33"}
42
+ {"current_steps": 42, "total_steps": 117, "loss": 1.8014, "lr": 7.621536417786159e-06, "epoch": 0.358974358974359, "percentage": 35.9, "elapsed_time": "2:13:29", "remaining_time": "3:58:23"}
43
+ {"current_steps": 43, "total_steps": 117, "loss": 1.8108, "lr": 7.500000000000001e-06, "epoch": 0.36752136752136755, "percentage": 36.75, "elapsed_time": "2:16:38", "remaining_time": "3:55:09"}
44
+ {"current_steps": 44, "total_steps": 117, "loss": 1.8069, "lr": 7.37646111780545e-06, "epoch": 0.37606837606837606, "percentage": 37.61, "elapsed_time": "2:19:47", "remaining_time": "3:51:56"}
45
+ {"current_steps": 45, "total_steps": 117, "loss": 1.8057, "lr": 7.251018724088367e-06, "epoch": 0.38461538461538464, "percentage": 38.46, "elapsed_time": "2:22:56", "remaining_time": "3:48:43"}
46
+ {"current_steps": 46, "total_steps": 117, "loss": 1.8065, "lr": 7.12377329642024e-06, "epoch": 0.39316239316239315, "percentage": 39.32, "elapsed_time": "2:26:05", "remaining_time": "3:45:29"}
47
+ {"current_steps": 47, "total_steps": 117, "loss": 1.7997, "lr": 6.994826756577082e-06, "epoch": 0.4017094017094017, "percentage": 40.17, "elapsed_time": "2:29:13", "remaining_time": "3:42:15"}
48
+ {"current_steps": 48, "total_steps": 117, "loss": 1.8073, "lr": 6.864282388901544e-06, "epoch": 0.41025641025641024, "percentage": 41.03, "elapsed_time": "2:32:22", "remaining_time": "3:39:01"}
49
+ {"current_steps": 49, "total_steps": 117, "loss": 1.8056, "lr": 6.732244757573619e-06, "epoch": 0.4188034188034188, "percentage": 41.88, "elapsed_time": "2:35:30", "remaining_time": "3:35:47"}
50
+ {"current_steps": 50, "total_steps": 117, "loss": 1.807, "lr": 6.598819622856227e-06, "epoch": 0.42735042735042733, "percentage": 42.74, "elapsed_time": "2:38:38", "remaining_time": "3:32:35"}
51
+ {"current_steps": 51, "total_steps": 117, "loss": 1.8098, "lr": 6.464113856382752e-06, "epoch": 0.4358974358974359, "percentage": 43.59, "elapsed_time": "2:41:48", "remaining_time": "3:29:23"}
52
+ {"current_steps": 52, "total_steps": 117, "loss": 1.8016, "lr": 6.328235355554382e-06, "epoch": 0.4444444444444444, "percentage": 44.44, "elapsed_time": "2:44:59", "remaining_time": "3:26:14"}
53
+ {"current_steps": 53, "total_steps": 117, "loss": 1.8111, "lr": 6.191292957115825e-06, "epoch": 0.452991452991453, "percentage": 45.3, "elapsed_time": "2:48:09", "remaining_time": "3:23:03"}
54
+ {"current_steps": 54, "total_steps": 117, "loss": 1.8038, "lr": 6.053396349978632e-06, "epoch": 0.46153846153846156, "percentage": 46.15, "elapsed_time": "2:51:18", "remaining_time": "3:19:51"}
55
+ {"current_steps": 55, "total_steps": 117, "loss": 1.7871, "lr": 5.914655987361934e-06, "epoch": 0.4700854700854701, "percentage": 47.01, "elapsed_time": "2:54:27", "remaining_time": "3:16:39"}
56
+ {"current_steps": 56, "total_steps": 117, "loss": 1.8018, "lr": 5.77518299832099e-06, "epoch": 0.47863247863247865, "percentage": 47.86, "elapsed_time": "2:57:36", "remaining_time": "3:13:27"}
57
+ {"current_steps": 57, "total_steps": 117, "loss": 1.8044, "lr": 5.635089098734394e-06, "epoch": 0.48717948717948717, "percentage": 48.72, "elapsed_time": "3:00:45", "remaining_time": "3:10:16"}
58
+ {"current_steps": 58, "total_steps": 117, "loss": 1.8005, "lr": 5.49448650182125e-06, "epoch": 0.49572649572649574, "percentage": 49.57, "elapsed_time": "3:03:54", "remaining_time": "3:07:05"}
59
+ {"current_steps": 59, "total_steps": 117, "loss": 1.8026, "lr": 5.353487828259973e-06, "epoch": 0.5042735042735043, "percentage": 50.43, "elapsed_time": "3:07:03", "remaining_time": "3:03:52"}
60
+ {"current_steps": 60, "total_steps": 117, "loss": 1.7954, "lr": 5.212206015980742e-06, "epoch": 0.5128205128205128, "percentage": 51.28, "elapsed_time": "3:10:11", "remaining_time": "3:00:40"}
61
+ {"current_steps": 61, "total_steps": 117, "loss": 1.7961, "lr": 5.070754229703811e-06, "epoch": 0.5213675213675214, "percentage": 52.14, "elapsed_time": "3:13:18", "remaining_time": "2:57:28"}
62
+ {"current_steps": 62, "total_steps": 117, "loss": 1.8036, "lr": 4.929245770296191e-06, "epoch": 0.5299145299145299, "percentage": 52.99, "elapsed_time": "3:16:27", "remaining_time": "2:54:16"}
63
+ {"current_steps": 63, "total_steps": 117, "loss": 1.8029, "lr": 4.78779398401926e-06, "epoch": 0.5384615384615384, "percentage": 53.85, "elapsed_time": "3:19:36", "remaining_time": "2:51:05"}
64
+ {"current_steps": 64, "total_steps": 117, "loss": 1.8026, "lr": 4.646512171740028e-06, "epoch": 0.5470085470085471, "percentage": 54.7, "elapsed_time": "3:22:44", "remaining_time": "2:47:53"}
65
+ {"current_steps": 65, "total_steps": 117, "loss": 1.8079, "lr": 4.505513498178752e-06, "epoch": 0.5555555555555556, "percentage": 55.56, "elapsed_time": "3:25:51", "remaining_time": "2:44:41"}
66
+ {"current_steps": 66, "total_steps": 117, "loss": 1.8008, "lr": 4.364910901265607e-06, "epoch": 0.5641025641025641, "percentage": 56.41, "elapsed_time": "3:29:00", "remaining_time": "2:41:30"}
67
+ {"current_steps": 67, "total_steps": 117, "loss": 1.7983, "lr": 4.224817001679011e-06, "epoch": 0.5726495726495726, "percentage": 57.26, "elapsed_time": "3:32:12", "remaining_time": "2:38:21"}
68
+ {"current_steps": 68, "total_steps": 117, "loss": 1.799, "lr": 4.085344012638067e-06, "epoch": 0.5811965811965812, "percentage": 58.12, "elapsed_time": "3:35:21", "remaining_time": "2:35:11"}
69
+ {"current_steps": 69, "total_steps": 117, "loss": 1.7981, "lr": 3.94660365002137e-06, "epoch": 0.5897435897435898, "percentage": 58.97, "elapsed_time": "3:38:31", "remaining_time": "2:32:01"}
70
+ {"current_steps": 70, "total_steps": 117, "loss": 1.7983, "lr": 3.808707042884176e-06, "epoch": 0.5982905982905983, "percentage": 59.83, "elapsed_time": "3:41:40", "remaining_time": "2:28:50"}
71
+ {"current_steps": 71, "total_steps": 117, "loss": 1.7924, "lr": 3.6717646444456196e-06, "epoch": 0.6068376068376068, "percentage": 60.68, "elapsed_time": "3:44:51", "remaining_time": "2:25:41"}
72
+ {"current_steps": 72, "total_steps": 117, "loss": 1.799, "lr": 3.5358861436172487e-06, "epoch": 0.6153846153846154, "percentage": 61.54, "elapsed_time": "3:48:00", "remaining_time": "2:22:30"}
73
+ {"current_steps": 73, "total_steps": 117, "loss": 1.8005, "lr": 3.401180377143774e-06, "epoch": 0.6239316239316239, "percentage": 62.39, "elapsed_time": "3:51:11", "remaining_time": "2:19:20"}
74
+ {"current_steps": 74, "total_steps": 117, "loss": 1.7956, "lr": 3.2677552424263836e-06, "epoch": 0.6324786324786325, "percentage": 63.25, "elapsed_time": "3:54:20", "remaining_time": "2:16:10"}
75
+ {"current_steps": 75, "total_steps": 117, "loss": 1.798, "lr": 3.1357176110984578e-06, "epoch": 0.6410256410256411, "percentage": 64.1, "elapsed_time": "3:57:29", "remaining_time": "2:12:59"}
76
+ {"current_steps": 76, "total_steps": 117, "loss": 1.7934, "lr": 3.0051732434229185e-06, "epoch": 0.6495726495726496, "percentage": 64.96, "elapsed_time": "4:00:37", "remaining_time": "2:09:48"}
77
+ {"current_steps": 77, "total_steps": 117, "loss": 1.8012, "lr": 2.8762267035797607e-06, "epoch": 0.6581196581196581, "percentage": 65.81, "elapsed_time": "4:03:46", "remaining_time": "2:06:37"}
78
+ {"current_steps": 78, "total_steps": 117, "loss": 1.7915, "lr": 2.748981275911633e-06, "epoch": 0.6666666666666666, "percentage": 66.67, "elapsed_time": "4:06:54", "remaining_time": "2:03:27"}
79
+ {"current_steps": 79, "total_steps": 117, "loss": 1.7867, "lr": 2.6235388821945497e-06, "epoch": 0.6752136752136753, "percentage": 67.52, "elapsed_time": "4:10:05", "remaining_time": "2:00:17"}
80
+ {"current_steps": 80, "total_steps": 117, "loss": 1.7952, "lr": 2.5000000000000015e-06, "epoch": 0.6837606837606838, "percentage": 68.38, "elapsed_time": "4:13:13", "remaining_time": "1:57:07"}
81
+ {"current_steps": 81, "total_steps": 117, "loss": 1.8026, "lr": 2.3784635822138424e-06, "epoch": 0.6923076923076923, "percentage": 69.23, "elapsed_time": "4:16:22", "remaining_time": "1:53:56"}
82
+ {"current_steps": 82, "total_steps": 117, "loss": 1.7956, "lr": 2.2590269777764516e-06, "epoch": 0.7008547008547008, "percentage": 70.09, "elapsed_time": "4:19:32", "remaining_time": "1:50:46"}
83
+ {"current_steps": 83, "total_steps": 117, "loss": 1.7947, "lr": 2.141785853707607e-06, "epoch": 0.7094017094017094, "percentage": 70.94, "elapsed_time": "4:22:41", "remaining_time": "1:47:36"}
84
+ {"current_steps": 84, "total_steps": 117, "loss": 1.7963, "lr": 2.0268341184785674e-06, "epoch": 0.717948717948718, "percentage": 71.79, "elapsed_time": "4:25:50", "remaining_time": "1:44:26"}
85
+ {"current_steps": 85, "total_steps": 117, "loss": 1.7974, "lr": 1.9142638467927254e-06, "epoch": 0.7264957264957265, "percentage": 72.65, "elapsed_time": "4:28:59", "remaining_time": "1:41:15"}
86
+ {"current_steps": 86, "total_steps": 117, "loss": 1.794, "lr": 1.8041652058350768e-06, "epoch": 0.7350427350427351, "percentage": 73.5, "elapsed_time": "4:32:07", "remaining_time": "1:38:05"}
87
+ {"current_steps": 87, "total_steps": 117, "loss": 1.8017, "lr": 1.6966263830495939e-06, "epoch": 0.7435897435897436, "percentage": 74.36, "elapsed_time": "4:35:14", "remaining_time": "1:34:54"}
88
+ {"current_steps": 88, "total_steps": 117, "loss": 1.7874, "lr": 1.5917335155023368e-06, "epoch": 0.7521367521367521, "percentage": 75.21, "elapsed_time": "4:38:24", "remaining_time": "1:31:44"}
89
+ {"current_steps": 89, "total_steps": 117, "loss": 1.8012, "lr": 1.4895706208868876e-06, "epoch": 0.7606837606837606, "percentage": 76.07, "elapsed_time": "4:41:32", "remaining_time": "1:28:34"}
90
+ {"current_steps": 90, "total_steps": 117, "loss": 1.8029, "lr": 1.390219530227378e-06, "epoch": 0.7692307692307693, "percentage": 76.92, "elapsed_time": "4:44:40", "remaining_time": "1:25:24"}
91
+ {"current_steps": 91, "total_steps": 117, "loss": 1.7955, "lr": 1.2937598223330006e-06, "epoch": 0.7777777777777778, "percentage": 77.78, "elapsed_time": "4:47:48", "remaining_time": "1:22:13"}
92
+ {"current_steps": 92, "total_steps": 117, "loss": 1.7919, "lr": 1.2002687600565138e-06, "epoch": 0.7863247863247863, "percentage": 78.63, "elapsed_time": "4:50:56", "remaining_time": "1:19:03"}
93
+ {"current_steps": 93, "total_steps": 117, "loss": 1.794, "lr": 1.1098212284078037e-06, "epoch": 0.7948717948717948, "percentage": 79.49, "elapsed_time": "4:54:04", "remaining_time": "1:15:53"}
94
+ {"current_steps": 94, "total_steps": 117, "loss": 1.7978, "lr": 1.0224896745720513e-06, "epoch": 0.8034188034188035, "percentage": 80.34, "elapsed_time": "4:57:12", "remaining_time": "1:12:43"}
95
+ {"current_steps": 95, "total_steps": 117, "loss": 1.8013, "lr": 9.383440498805712e-07, "epoch": 0.811965811965812, "percentage": 81.2, "elapsed_time": "5:00:24", "remaining_time": "1:09:34"}
96
+ {"current_steps": 96, "total_steps": 117, "loss": 1.7928, "lr": 8.574517537807897e-07, "epoch": 0.8205128205128205, "percentage": 82.05, "elapsed_time": "5:03:33", "remaining_time": "1:06:24"}
97
+ {"current_steps": 97, "total_steps": 117, "loss": 1.7875, "lr": 7.798775798502484e-07, "epoch": 0.8290598290598291, "percentage": 82.91, "elapsed_time": "5:06:43", "remaining_time": "1:03:14"}
98
+ {"current_steps": 98, "total_steps": 117, "loss": 1.7868, "lr": 7.056836638978698e-07, "epoch": 0.8376068376068376, "percentage": 83.76, "elapsed_time": "5:09:54", "remaining_time": "1:00:05"}
99
+ {"current_steps": 99, "total_steps": 117, "loss": 1.7947, "lr": 6.349294341940593e-07, "epoch": 0.8461538461538461, "percentage": 84.62, "elapsed_time": "5:13:05", "remaining_time": "0:56:55"}
100
+ {"current_steps": 100, "total_steps": 117, "loss": 1.7933, "lr": 5.676715638695063e-07, "epoch": 0.8547008547008547, "percentage": 85.47, "elapsed_time": "5:16:13", "remaining_time": "0:53:45"}
101
+ {"current_steps": 101, "total_steps": 117, "loss": 1.7983, "lr": 5.039639255208156e-07, "epoch": 0.8632478632478633, "percentage": 86.32, "elapsed_time": "5:21:46", "remaining_time": "0:50:58"}
102
+ {"current_steps": 102, "total_steps": 117, "loss": 1.7923, "lr": 4.43857548059321e-07, "epoch": 0.8717948717948718, "percentage": 87.18, "elapsed_time": "5:24:55", "remaining_time": "0:47:46"}
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bbcb0763afe7ddb533417233ceab289a75850df67ec6d1a045ba6d98406b7a67
3
+ size 7875
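Like the model shards, `training_args.bin` is tracked with Git LFS, so the diff shows only the three-line pointer file (spec version, SHA-256 object id, byte size), not the 7875-byte binary it stands for. A minimal parser for that pointer layout, fed the exact text from this diff:

```python
def parse_lfs_pointer(text: str) -> dict:
    """Split a Git LFS pointer file into its 'key value' fields."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = parse_lfs_pointer(
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:bbcb0763afe7ddb533417233ceab289a75850df67ec6d1a045ba6d98406b7a67\n"
    "size 7875\n"
)
algo, digest = pointer["oid"].split(":", 1)
assert algo == "sha256" and pointer["size"] == "7875"
```

Once the real object is fetched, a `training_args.bin` written by the `transformers` Trainer is conventionally a pickled `TrainingArguments`, so it is loaded with `torch.load("training_args.bin", weights_only=False)` rather than as a tensor file.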
vocab.json ADDED
The diff for this file is too large to render. See raw diff
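`vocab.json` and `merges.txt` are the two halves of a byte-level BPE tokenizer (the token-to-id map and the merge rules), which is why both diffs are suppressed for size; they are consumed through the tokenizer rather than read directly. A sketch of the usual way a checkpoint repo like this one is loaded, with `your-org/your-checkpoint` as a placeholder id, not this repo's real name:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "your-org/your-checkpoint"  # placeholder repo id

tokenizer = AutoTokenizer.from_pretrained(repo)     # reads vocab.json + merges.txt
model = AutoModelForCausalLM.from_pretrained(repo)  # assembles the safetensors shards

inputs = tokenizer("Hello", return_tensors="pt")
print(tokenizer.decode(model.generate(**inputs, max_new_tokens=20)[0]))
```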