PureFighter committed
Commit 2c8a6f4
1 Parent(s): c21d05f

Training in progress, step 500, checkpoint

last-checkpoint/config.json ADDED
@@ -0,0 +1,52 @@
+ {
+   "_name_or_path": "PureFighter/autotrain-topic-classification",
+   "_num_labels": 6,
+   "architectures": [
+     "BertForSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "classifier_dropout": null,
+   "finetuning_task": "text-classification",
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "LABEL_0",
+     "1": "LABEL_1",
+     "2": "LABEL_2",
+     "3": "LABEL_3",
+     "4": "LABEL_4",
+     "5": "LABEL_5",
+     "6": "LABEL_6",
+     "7": "LABEL_7",
+     "8": "LABEL_8",
+     "9": "LABEL_9"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "label2id": {
+     "LABEL_0": 0,
+     "LABEL_1": 1,
+     "LABEL_2": 2,
+     "LABEL_3": 3,
+     "LABEL_4": 4,
+     "LABEL_5": 5,
+     "LABEL_6": 6,
+     "LABEL_7": 7,
+     "LABEL_8": 8,
+     "LABEL_9": 9
+   },
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 512,
+   "model_type": "bert",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "pad_token_id": 31,
+   "position_embedding_type": "absolute",
+   "problem_type": "single_label_classification",
+   "torch_dtype": "float32",
+   "transformers_version": "4.44.0",
+   "type_vocab_size": 2,
+   "use_cache": true,
+   "vocab_size": 64000
+ }
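The config above describes a 12-layer BERT encoder with a 64k-token vocabulary and a 10-entry label map wired for single-label classification (the `_num_labels: 6` field looks like a leftover; `num_labels` is derived from `id2label` when the config is loaded). A minimal sketch, not part of the commit, of loading this checkpoint with the transformers Auto classes; `./last-checkpoint` is an assumed local path to the files listed here:

```python
# Minimal sketch (assumption: the checkpoint files above were downloaded
# to ./last-checkpoint). Loads config, tokenizer, and classification head.
from transformers import AutoConfig, AutoModelForSequenceClassification, AutoTokenizer

ckpt_dir = "./last-checkpoint"

config = AutoConfig.from_pretrained(ckpt_dir)
print(config.model_type, config.num_hidden_layers, config.vocab_size)  # bert 12 64000
print(config.num_labels)  # 10 (derived from the id2label map above)

tokenizer = AutoTokenizer.from_pretrained(ckpt_dir)
model = AutoModelForSequenceClassification.from_pretrained(ckpt_dir)
```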
last-checkpoint/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cd5dde95ad6c157a2fe95cc958dbb18c26699082da64c338dab9848d4c67c19a
+ size 540827680
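As with the other binary files in this commit, what is committed is a Git LFS pointer (spec version, sha256 oid, byte size), not the weights themselves; the actual object is fetched on download. A small sketch, assuming the real `model.safetensors` has been downloaded locally, of checking it against the pointer:

```python
# Sketch: verify a downloaded LFS object against the pointer's sha256 oid and size.
# The path is an assumption; the oid and size are copied from the pointer above.
import hashlib
import os

path = "./last-checkpoint/model.safetensors"
expected_oid = "cd5dde95ad6c157a2fe95cc958dbb18c26699082da64c338dab9848d4c67c19a"
expected_size = 540_827_680

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(path) == expected_size
assert digest.hexdigest() == expected_oid
print("model.safetensors matches the LFS pointer")
```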
last-checkpoint/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3ea50f6e51285e2a51883bc21cde22e02fb19d673bd8e39e2afff7b2db706d12
+ size 1081770746
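The optimizer state is roughly twice the size of the float32 weights, which is what AdamW-style optimizers produce by keeping two float32 moment tensors per parameter. A back-of-the-envelope check using the two pointer sizes:

```python
# Rough arithmetic only: float32 weights are 4 bytes/param; AdamW stores two
# extra float32 tensors (exp_avg, exp_avg_sq) per parameter.
model_bytes = 540_827_680        # from the model.safetensors pointer
optimizer_bytes = 1_081_770_746  # from the optimizer.pt pointer

approx_params = model_bytes / 4
print(f"~{approx_params / 1e6:.0f}M parameters")                       # ~135M
print(f"optimizer/model size ratio: {optimizer_bytes / model_bytes:.2f}")  # ~2.00
```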
last-checkpoint/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:06e55bfc8723f269a626afca0be6f7def5753f3bb265436b94c5580b703cfcc7
+ size 13990
last-checkpoint/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b8967d5c23dc2de82573da94680305eacf32d7b908df4950b6f5a1314e5be47e
+ size 1064
last-checkpoint/special_tokens_map.json ADDED
@@ -0,0 +1,37 @@
+ {
+   "cls_token": {
+     "content": "[CLS]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "mask_token": {
+     "content": "[MASK]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "[PAD]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "sep_token": {
+     "content": "[SEP]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "[UNK]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
last-checkpoint/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
last-checkpoint/tokenizer_config.json ADDED
@@ -0,0 +1,345 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "+ا",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "1": {
+       "content": "+ك",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "2": {
+       "content": "ب+",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "3": {
+       "content": "+هم",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "4": {
+       "content": "+ات",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "5": {
+       "content": "+ي",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "6": {
+       "content": "ل+",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "7": {
+       "content": "+هما",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "8": {
+       "content": "+نا",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "9": {
+       "content": "+ن",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "10": {
+       "content": "+ها",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "11": {
+       "content": "+كما",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "12": {
+       "content": "+ة",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "13": {
+       "content": "ف+",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "14": {
+       "content": "+كم",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "15": {
+       "content": "+كن",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "16": {
+       "content": "+ت",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "17": {
+       "content": "[بريد]",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "18": {
+       "content": "[مستخدم]",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "19": {
+       "content": "لل+",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "20": {
+       "content": "ال+",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "21": {
+       "content": "[رابط]",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "22": {
+       "content": "س+",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "23": {
+       "content": "+ان",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "24": {
+       "content": "+وا",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "25": {
+       "content": "+ه",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "26": {
+       "content": "+ون",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "27": {
+       "content": "+هن",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "28": {
+       "content": "+ين",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "29": {
+       "content": "و+",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "30": {
+       "content": "ك+",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": true,
+       "special": true
+     },
+     "31": {
+       "content": "[PAD]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "32": {
+       "content": "[UNK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "33": {
+       "content": "[CLS]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "34": {
+       "content": "[SEP]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "35": {
+       "content": "[MASK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "[CLS]",
+   "do_basic_tokenize": true,
+   "do_lower_case": false,
+   "mask_token": "[MASK]",
+   "max_len": 512,
+   "max_length": 128,
+   "model_max_length": 512,
+   "never_split": [
+     "+ك",
+     "+كما",
+     "ك+",
+     "+وا",
+     "+ين",
+     "و+",
+     "+كن",
+     "+ان",
+     "+هم",
+     "+ة",
+     "[بريد]",
+     "لل+",
+     "+ي",
+     "+ت",
+     "+ن",
+     "س+",
+     "ل+",
+     "[مستخدم]",
+     "+كم",
+     "+ا",
+     "ب+",
+     "ف+",
+     "+نا",
+     "+ها",
+     "+ون",
+     "+هما",
+     "ال+",
+     "+ه",
+     "+هن",
+     "+ات",
+     "[رابط]"
+   ],
+   "pad_to_multiple_of": null,
+   "pad_token": "[PAD]",
+   "pad_token_type_id": 0,
+   "padding_side": "right",
+   "sep_token": "[SEP]",
+   "stride": 0,
+   "strip_accents": null,
+   "tokenize_chinese_chars": true,
+   "tokenizer_class": "BertTokenizer",
+   "truncation_side": "right",
+   "truncation_strategy": "longest_first",
+   "unk_token": "[UNK]"
+ }
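The added tokens and the never_split list are AraBERT-style Arabic segmentation markers (prefixes such as ال+ and و+, suffixes such as +ها), which suggests the input text is pre-segmented before being fed to this BertTokenizer. A short sketch of loading the tokenizer and checking a few of the values recorded above; the path and sample sentence are assumptions for illustration:

```python
# Sketch: load the tokenizer from this checkpoint and confirm a few settings.
# "./last-checkpoint" is an assumed local path to the files in this commit.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./last-checkpoint")

print(type(tokenizer).__name__)     # a BertTokenizer / BertTokenizerFast variant
print(tokenizer.model_max_length)   # 512
print(tokenizer.pad_token_id)       # 31, matching pad_token_id in config.json

# Illustrative pre-segmented input; markers like ال+ and ل+ stay intact because
# they are registered in added_tokens_decoder / never_split.
enc = tokenizer("ال+ جملة تجريبية ل+ توضيح ال+ ترميز", truncation=True, max_length=128)
print(tokenizer.convert_ids_to_tokens(enc["input_ids"]))
```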
last-checkpoint/trainer_state.json ADDED
@@ -0,0 +1,179 @@
+ {
+   "best_metric": 0.7994011976047904,
+   "best_model_checkpoint": "./results/checkpoint-500",
+   "epoch": 1.3333333333333333,
+   "eval_steps": 250,
+   "global_step": 500,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 0.0026666666666666666,
+       "grad_norm": 16.15040397644043,
+       "learning_rate": 1.9982222222222224e-05,
+       "loss": 2.3987,
+       "step": 1
+     },
+     {
+       "epoch": 0.08,
+       "grad_norm": 12.260425567626953,
+       "learning_rate": 1.9466666666666668e-05,
+       "loss": 2.2393,
+       "step": 30
+     },
+     {
+       "epoch": 0.16,
+       "grad_norm": 8.993585586547852,
+       "learning_rate": 1.8933333333333334e-05,
+       "loss": 1.7878,
+       "step": 60
+     },
+     {
+       "epoch": 0.24,
+       "grad_norm": 11.122185707092285,
+       "learning_rate": 1.8400000000000003e-05,
+       "loss": 1.5155,
+       "step": 90
+     },
+     {
+       "epoch": 0.32,
+       "grad_norm": 15.829800605773926,
+       "learning_rate": 1.7866666666666666e-05,
+       "loss": 1.2466,
+       "step": 120
+     },
+     {
+       "epoch": 0.4,
+       "grad_norm": 14.291194915771484,
+       "learning_rate": 1.7333333333333336e-05,
+       "loss": 1.1996,
+       "step": 150
+     },
+     {
+       "epoch": 0.48,
+       "grad_norm": 17.213542938232422,
+       "learning_rate": 1.6800000000000002e-05,
+       "loss": 1.0749,
+       "step": 180
+     },
+     {
+       "epoch": 0.56,
+       "grad_norm": 8.14549446105957,
+       "learning_rate": 1.6266666666666668e-05,
+       "loss": 0.9717,
+       "step": 210
+     },
+     {
+       "epoch": 0.64,
+       "grad_norm": 13.813669204711914,
+       "learning_rate": 1.5733333333333334e-05,
+       "loss": 1.1143,
+       "step": 240
+     },
+     {
+       "epoch": 0.6666666666666666,
+       "eval_accuracy": 0.7455089820359282,
+       "eval_loss": 0.9105517268180847,
+       "eval_runtime": 128.8034,
+       "eval_samples_per_second": 2.593,
+       "eval_steps_per_second": 0.326,
+       "step": 250
+     },
+     {
+       "epoch": 0.72,
+       "grad_norm": 6.617077350616455,
+       "learning_rate": 1.5200000000000002e-05,
+       "loss": 0.8563,
+       "step": 270
+     },
+     {
+       "epoch": 0.8,
+       "grad_norm": 7.134422302246094,
+       "learning_rate": 1.4666666666666666e-05,
+       "loss": 0.8534,
+       "step": 300
+     },
+     {
+       "epoch": 0.88,
+       "grad_norm": 14.594669342041016,
+       "learning_rate": 1.4133333333333334e-05,
+       "loss": 0.8166,
+       "step": 330
+     },
+     {
+       "epoch": 0.96,
+       "grad_norm": 11.417457580566406,
+       "learning_rate": 1.3600000000000002e-05,
+       "loss": 0.8419,
+       "step": 360
+     },
+     {
+       "epoch": 1.04,
+       "grad_norm": 13.239908218383789,
+       "learning_rate": 1.3066666666666668e-05,
+       "loss": 0.7204,
+       "step": 390
+     },
+     {
+       "epoch": 1.12,
+       "grad_norm": 16.799551010131836,
+       "learning_rate": 1.2533333333333336e-05,
+       "loss": 0.5632,
+       "step": 420
+     },
+     {
+       "epoch": 1.2,
+       "grad_norm": 7.094001770019531,
+       "learning_rate": 1.2e-05,
+       "loss": 0.7082,
+       "step": 450
+     },
+     {
+       "epoch": 1.28,
+       "grad_norm": 17.73163604736328,
+       "learning_rate": 1.1466666666666668e-05,
+       "loss": 0.573,
+       "step": 480
+     },
+     {
+       "epoch": 1.3333333333333333,
+       "eval_accuracy": 0.7994011976047904,
+       "eval_loss": 0.6648051738739014,
+       "eval_runtime": 125.6819,
+       "eval_samples_per_second": 2.658,
+       "eval_steps_per_second": 0.334,
+       "step": 500
+     }
+   ],
+   "logging_steps": 30,
+   "max_steps": 1125,
+   "num_input_tokens_seen": 0,
+   "num_train_epochs": 3,
+   "save_steps": 500,
+   "stateful_callbacks": {
+     "EarlyStoppingCallback": {
+       "args": {
+         "early_stopping_patience": 3,
+         "early_stopping_threshold": 0.0
+       },
+       "attributes": {
+         "early_stopping_patience_counter": 0
+       }
+     },
+     "TrainerControl": {
+       "args": {
+         "should_epoch_stop": false,
+         "should_evaluate": false,
+         "should_log": false,
+         "should_save": true,
+         "should_training_stop": false
+       },
+       "attributes": {}
+     }
+   },
+   "total_flos": 262998389326848.0,
+   "train_batch_size": 8,
+   "trial_name": null,
+   "trial_params": null
+ }
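trainer_state.json is the Trainer's progress record for this run: 500 of 1125 steps completed (epoch 1.33 of 3), logging every 30 steps, evaluating every 250, saving every 500, early stopping with patience 3, and a best eval_accuracy of 0.7994 at step 500; the logged learning rates are consistent with a linear decay from 2e-5 over the 1125 steps. This file is what the Trainer consults when resuming via `trainer.train(resume_from_checkpoint=...)`. A small sketch that reads the file (assumed downloaded locally) and summarizes the run:

```python
# Sketch: summarize the training progress recorded in trainer_state.json.
# Assumes the checkpoint files above were downloaded to ./last-checkpoint.
import json

with open("./last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

print(f'{state["global_step"]}/{state["max_steps"]} steps, epoch {state["epoch"]:.2f}')
print("best eval_accuracy:", state["best_metric"], "at", state["best_model_checkpoint"])

# Print one line per evaluation entry in the log history.
for entry in state["log_history"]:
    if "eval_accuracy" in entry:
        print(f'step {entry["step"]}: acc={entry["eval_accuracy"]:.4f} '
              f'loss={entry["eval_loss"]:.4f}')
```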
last-checkpoint/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9d65522ff22da8e7b612a9a9efd10c95d76a998e746507c4b04ba066dda9e6c0
+ size 5176
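training_args.bin is the pickled TrainingArguments object the Trainer was launched with, so the exact hyperparameters can be recovered by unpickling it. A hedged sketch, assuming a local download, that transformers is importable for unpickling, and that newer torch versions need `weights_only=False` for non-tensor pickles:

```python
# Sketch: recover the TrainingArguments saved alongside this checkpoint.
# weights_only=False is needed on recent torch releases because the file
# pickles a full TrainingArguments object, not just tensors.
import torch

args = torch.load("./last-checkpoint/training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)
```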
last-checkpoint/vocab.txt ADDED
The diff for this file is too large to render. See raw diff