sanchit-gandhi committed
Commit 2c6d066 · verified · 1 parent: e7946df

Training in progress, step 1000

Files changed (34)
  1. added_tokens.json +1611 -0
  2. config.json +51 -0
  3. merges.txt +0 -0
  4. model.safetensors +3 -0
  5. normalizer.json +1742 -0
  6. preprocessor_config.json +14 -0
  7. run.sh +34 -0
  8. run_speech_recognition_seq2seq.py +627 -0
  9. runs/Mar27_13-10-05_hf-dgx-01/events.out.tfevents.1711541419.hf-dgx-01.1400507.0 +3 -0
  10. runs/Mar27_14-10-22_hf-dgx-01/events.out.tfevents.1711545032.hf-dgx-01.1482814.0 +3 -0
  11. special_tokens_map.json +139 -0
  12. tokenizer.json +0 -0
  13. tokenizer_config.json +0 -0
  14. training_args.bin +3 -0
  15. vocab.json +0 -0
  16. wandb/debug-cli.sanchit.log +0 -0
  17. wandb/debug-internal.log +0 -0
  18. wandb/debug.log +28 -0
  19. wandb/run-20240327_131020-92ximxsk/files/config.yaml +731 -0
  20. wandb/run-20240327_131020-92ximxsk/files/output.log +896 -0
  21. wandb/run-20240327_131020-92ximxsk/files/requirements.txt +247 -0
  22. wandb/run-20240327_131020-92ximxsk/files/wandb-metadata.json +738 -0
  23. wandb/run-20240327_131020-92ximxsk/files/wandb-summary.json +1 -0
  24. wandb/run-20240327_131020-92ximxsk/logs/debug-internal.log +0 -0
  25. wandb/run-20240327_131020-92ximxsk/logs/debug.log +29 -0
  26. wandb/run-20240327_131020-92ximxsk/run-92ximxsk.wandb +0 -0
  27. wandb/run-20240327_141033-golaq7b9/files/config.yaml +751 -0
  28. wandb/run-20240327_141033-golaq7b9/files/output.log +1036 -0
  29. wandb/run-20240327_141033-golaq7b9/files/requirements.txt +247 -0
  30. wandb/run-20240327_141033-golaq7b9/files/wandb-metadata.json +738 -0
  31. wandb/run-20240327_141033-golaq7b9/files/wandb-summary.json +1 -0
  32. wandb/run-20240327_141033-golaq7b9/logs/debug-internal.log +0 -0
  33. wandb/run-20240327_141033-golaq7b9/logs/debug.log +28 -0
  34. wandb/run-20240327_141033-golaq7b9/run-golaq7b9.wandb +0 -0
added_tokens.json ADDED
@@ -0,0 +1,1611 @@
+ {
[… 1,501 timestamp-token entries: "<|0.00|>": 50365 through "<|30.00|>": 51865, one token per 0.02 s with consecutive IDs …]
+ "<|af|>": 50327,
+ "<|am|>": 50334,
+ "<|ar|>": 50272,
+ "<|as|>": 50350,
+ "<|az|>": 50304,
+ "<|ba|>": 50355,
+ "<|be|>": 50330,
+ "<|bg|>": 50292,
+ "<|bn|>": 50302,
+ "<|bo|>": 50347,
+ "<|br|>": 50309,
+ "<|bs|>": 50315,
+ "<|ca|>": 50270,
+ "<|cs|>": 50283,
+ "<|cy|>": 50297,
+ "<|da|>": 50285,
+ "<|de|>": 50261,
+ "<|el|>": 50281,
+ "<|endoftext|>": 50257,
+ "<|en|>": 50259,
+ "<|es|>": 50262,
+ "<|et|>": 50307,
+ "<|eu|>": 50310,
+ "<|fa|>": 50300,
+ "<|fi|>": 50277,
+ "<|fo|>": 50338,
+ "<|fr|>": 50265,
+ "<|gl|>": 50319,
+ "<|gu|>": 50333,
+ "<|haw|>": 50352,
+ "<|ha|>": 50354,
+ "<|he|>": 50279,
+ "<|hi|>": 50276,
+ "<|hr|>": 50291,
+ "<|ht|>": 50339,
+ "<|hu|>": 50286,
+ "<|hy|>": 50312,
+ "<|id|>": 50275,
+ "<|is|>": 50311,
+ "<|it|>": 50274,
+ "<|ja|>": 50266,
+ "<|jw|>": 50356,
+ "<|ka|>": 50329,
+ "<|kk|>": 50316,
+ "<|km|>": 50323,
+ "<|kn|>": 50306,
+ "<|ko|>": 50264,
+ "<|la|>": 50294,
+ "<|lb|>": 50345,
+ "<|ln|>": 50353,
+ "<|lo|>": 50336,
+ "<|lt|>": 50293,
+ "<|lv|>": 50301,
+ "<|mg|>": 50349,
+ "<|mi|>": 50295,
+ "<|mk|>": 50308,
+ "<|ml|>": 50296,
+ "<|mn|>": 50314,
+ "<|mr|>": 50320,
+ "<|ms|>": 50282,
+ "<|mt|>": 50343,
+ "<|my|>": 50346,
+ "<|ne|>": 50313,
+ "<|nl|>": 50271,
+ "<|nn|>": 50342,
+ "<|nospeech|>": 50363,
+ "<|notimestamps|>": 50364,
+ "<|no|>": 50288,
+ "<|oc|>": 50328,
+ "<|pa|>": 50321,
+ "<|pl|>": 50269,
+ "<|ps|>": 50340,
+ "<|pt|>": 50267,
+ "<|ro|>": 50284,
+ "<|ru|>": 50263,
+ "<|sa|>": 50344,
+ "<|sd|>": 50332,
+ "<|si|>": 50322,
+ "<|sk|>": 50298,
+ "<|sl|>": 50305,
+ "<|sn|>": 50324,
+ "<|so|>": 50326,
+ "<|sq|>": 50317,
+ "<|sr|>": 50303,
+ "<|startoflm|>": 50361,
+ "<|startofprev|>": 50362,
+ "<|startoftranscript|>": 50258,
+ "<|su|>": 50357,
+ "<|sv|>": 50273,
+ "<|sw|>": 50318,
+ "<|ta|>": 50287,
+ "<|te|>": 50299,
+ "<|tg|>": 50331,
+ "<|th|>": 50289,
+ "<|tk|>": 50341,
+ "<|tl|>": 50348,
+ "<|transcribe|>": 50360,
+ "<|translate|>": 50359,
+ "<|tr|>": 50268,
+ "<|tt|>": 50351,
+ "<|uk|>": 50280,
+ "<|ur|>": 50290,
+ "<|uz|>": 50337,
+ "<|vi|>": 50278,
+ "<|yi|>": 50335,
+ "<|yo|>": 50325,
+ "<|yue|>": 50358,
+ "<|zh|>": 50260
+ }
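
The timestamp-token IDs above are arithmetic rather than arbitrary: starting from "<|0.00|>" at 50365, every 0.02 s step advances the ID by one, up to "<|30.00|>" at 51865. A minimal Python sketch of that correspondence (the helper names are illustrative, not part of this repo):

```python
# Timestamp-token arithmetic as listed in added_tokens.json:
# "<|0.00|>" -> 50365, each 0.02 s step -> +1, "<|30.00|>" -> 51865.
TIMESTAMP_BASE_ID = 50365  # ID of "<|0.00|>"
STEP_SECONDS = 0.02

def timestamp_to_token_id(seconds: float) -> int:
    """Map a time in [0.0, 30.0] to its Whisper timestamp token ID."""
    if not 0.0 <= seconds <= 30.0:
        raise ValueError("timestamp tokens only cover 0.00-30.00 s")
    return TIMESTAMP_BASE_ID + round(seconds / STEP_SECONDS)

def token_id_to_timestamp(token_id: int) -> float:
    """Inverse mapping: token ID back to seconds."""
    return (token_id - TIMESTAMP_BASE_ID) * STEP_SECONDS

assert timestamp_to_token_id(0.00) == 50365
assert timestamp_to_token_id(1.00) == 50415   # matches "<|1.00|>" above
assert timestamp_to_token_id(30.00) == 51865  # matches "<|30.00|>" above
```
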
config.json ADDED
@@ -0,0 +1,51 @@
+ {
+ "_name_or_path": "distil-whisper/distil-large-v3",
+ "activation_dropout": 0.0,
+ "activation_function": "gelu",
+ "apply_spec_augment": false,
+ "architectures": [
+ "WhisperForConditionalGeneration"
+ ],
+ "attention_dropout": 0.0,
+ "begin_suppress_tokens": [
+ 220,
+ 50257
+ ],
+ "bos_token_id": 50257,
+ "classifier_proj_size": 256,
+ "d_model": 1280,
+ "decoder_attention_heads": 20,
+ "decoder_ffn_dim": 5120,
+ "decoder_layerdrop": 0.0,
+ "decoder_layers": 2,
+ "decoder_start_token_id": 50258,
+ "dropout": 0.0,
+ "encoder_attention_heads": 20,
+ "encoder_ffn_dim": 5120,
+ "encoder_layerdrop": 0.0,
+ "encoder_layers": 32,
+ "eos_token_id": 50257,
+ "forced_decoder_ids": null,
+ "init_std": 0.02,
+ "is_encoder_decoder": true,
+ "mask_feature_length": 10,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.0,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_prob": 0.05,
+ "max_length": 448,
+ "max_source_positions": 1500,
+ "max_target_positions": 448,
+ "median_filter_width": 7,
+ "model_type": "whisper",
+ "num_hidden_layers": 32,
+ "num_mel_bins": 128,
+ "pad_token_id": 50256,
+ "scale_embedding": false,
+ "torch_dtype": "float32",
+ "transformers_version": "4.40.0.dev0",
+ "use_cache": true,
+ "use_weighted_layer_sum": false,
+ "vocab_size": 51866
+ }
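
This config describes the distil-large-v3 layout (32 encoder layers against only 2 decoder layers, d_model 1280, 128 mel bins). A minimal sketch, assuming the standard Hugging Face transformers Whisper classes, of how such a config.json is consumed; the repo ID is taken from "_name_or_path" above:

```python
from transformers import WhisperConfig, WhisperForConditionalGeneration

config = WhisperConfig.from_pretrained("distil-whisper/distil-large-v3")
# The distilled layout: deep encoder, shallow 2-layer decoder.
print(config.encoder_layers, config.decoder_layers)  # -> 32 2
print(config.d_model, config.num_mel_bins)           # -> 1280 128

# Build an untrained model with this architecture; the trained weights
# live in model.safetensors when loading the actual checkpoint.
model = WhisperForConditionalGeneration(config)
```
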
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f3d092e62cb3ae648dc1d265186dd74bf85987443ecc851bc298c63c67272926
+ size 3025686376
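
The three lines above are a Git LFS pointer, not the weights themselves: the Hub stores the 3,025,686,376-byte safetensors file (roughly 756 M float32 parameters) separately and identifies it by its SHA-256. A small standard-library sketch for checking a downloaded copy against the pointer (the local path is hypothetical):

```python
import hashlib

EXPECTED_OID = "f3d092e62cb3ae648dc1d265186dd74bf85987443ecc851bc298c63c67272926"
EXPECTED_SIZE = 3025686376  # bytes, per the LFS pointer above

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file through sha256 so large checkpoints fit in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Point at the downloaded file before running this check.
assert sha256_of("model.safetensors") == EXPECTED_OID
```
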
normalizer.json ADDED
@@ -0,0 +1,1742 @@
+ {
+ "accessorise": "accessorize",
+ "accessorised": "accessorized",
+ "accessorises": "accessorizes",
+ "accessorising": "accessorizing",
+ "acclimatisation": "acclimatization",
+ "acclimatise": "acclimatize",
+ "acclimatised": "acclimatized",
+ "acclimatises": "acclimatizes",
+ "acclimatising": "acclimatizing",
+ "accoutrements": "accouterments",
+ "aeon": "eon",
+ "aeons": "eons",
+ "aerogramme": "aerogram",
+ "aerogrammes": "aerograms",
+ "aeroplane": "airplane",
+ "aeroplanes": "airplanes",
+ "aesthete": "esthete",
+ "aesthetes": "esthetes",
+ "aesthetic": "esthetic",
+ "aesthetically": "esthetically",
+ "aesthetics": "esthetics",
+ "aetiology": "etiology",
+ "ageing": "aging",
+ "aggrandisement": "aggrandizement",
+ "agonise": "agonize",
+ "agonised": "agonized",
+ "agonises": "agonizes",
+ "agonising": "agonizing",
+ "agonisingly": "agonizingly",
+ "almanack": "almanac",
+ "almanacks": "almanacs",
+ "aluminium": "aluminum",
+ "amortisable": "amortizable",
+ "amortisation": "amortization",
+ "amortisations": "amortizations",
+ "amortise": "amortize",
+ "amortised": "amortized",
+ "amortises": "amortizes",
+ "amortising": "amortizing",
+ "amphitheatre": "amphitheater",
+ "amphitheatres": "amphitheaters",
+ "anaemia": "anemia",
+ "anaemic": "anemic",
+ "anaesthesia": "anesthesia",
+ "anaesthetic": "anesthetic",
+ "anaesthetics": "anesthetics",
+ "anaesthetise": "anesthetize",
+ "anaesthetised": "anesthetized",
+ "anaesthetises": "anesthetizes",
+ "anaesthetising": "anesthetizing",
+ "anaesthetist": "anesthetist",
+ "anaesthetists": "anesthetists",
+ "anaesthetize": "anesthetize",
+ "anaesthetized": "anesthetized",
+ "anaesthetizes": "anesthetizes",
+ "anaesthetizing": "anesthetizing",
+ "analogue": "analog",
+ "analogues": "analogs",
+ "analyse": "analyze",
+ "analysed": "analyzed",
+ "analyses": "analyzes",
+ "analysing": "analyzing",
+ "anglicise": "anglicize",
+ "anglicised": "anglicized",
+ "anglicises": "anglicizes",
+ "anglicising": "anglicizing",
+ "annualised": "annualized",
+ "antagonise": "antagonize",
+ "antagonised": "antagonized",
+ "antagonises": "antagonizes",
+ "antagonising": "antagonizing",
+ "apologise": "apologize",
+ "apologised": "apologized",
+ "apologises": "apologizes",
+ "apologising": "apologizing",
+ "appal": "appall",
+ "appals": "appalls",
+ "appetiser": "appetizer",
+ "appetisers": "appetizers",
+ "appetising": "appetizing",
+ "appetisingly": "appetizingly",
+ "arbour": "arbor",
+ "arbours": "arbors",
+ "archaeologically": "archeologically",
+ "archaeologist": "archeologist",
+ "archaeologists": "archeologists",
+ "archaeology": "archeology",
+ "archeological": "archaeological",
+ "ardour": "ardor",
+ "armour": "armor",
+ "armoured": "armored",
+ "armourer": "armorer",
+ "armourers": "armorers",
+ "armouries": "armories",
+ "armoury": "armory",
+ "artefact": "artifact",
+ "artefacts": "artifacts",
+ "authorise": "authorize",
+ "authorised": "authorized",
+ "authorises": "authorizes",
+ "authorising": "authorizing",
+ "axe": "ax",
+ "backpedalled": "backpedaled",
+ "backpedalling": "backpedaling",
+ "bannister": "banister",
+ "bannisters": "banisters",
+ "baptise": "baptize",
+ "baptised": "baptized",
+ "baptises": "baptizes",
+ "baptising": "baptizing",
+ "bastardise": "bastardize",
+ "bastardised": "bastardized",
+ "bastardises": "bastardizes",
+ "bastardising": "bastardizing",
+ "battleax": "battleaxe",
+ "baulk": "balk",
+ "baulked": "balked",
+ "baulking": "balking",
+ "baulks": "balks",
+ "bedevilled": "bedeviled",
+ "bedevilling": "bedeviling",
+ "behaviour": "behavior",
+ "behavioural": "behavioral",
+ "behaviourism": "behaviorism",
+ "behaviourist": "behaviorist",
+ "behaviourists": "behaviorists",
+ "behaviours": "behaviors",
+ "behove": "behoove",
+ "behoved": "behooved",
+ "behoves": "behooves",
+ "bejewelled": "bejeweled",
+ "belabour": "belabor",
+ "belaboured": "belabored",
+ "belabouring": "belaboring",
+ "belabours": "belabors",
+ "bevelled": "beveled",
+ "bevvies": "bevies",
+ "bevvy": "bevy",
+ "biassed": "biased",
+ "biassing": "biasing",
+ "bingeing": "binging",
+ "bougainvillaea": "bougainvillea",
+ "bougainvillaeas": "bougainvilleas",
+ "bowdlerise": "bowdlerize",
+ "bowdlerised": "bowdlerized",
+ "bowdlerises": "bowdlerizes",
+ "bowdlerising": "bowdlerizing",
+ "breathalyse": "breathalyze",
+ "breathalysed": "breathalyzed",
+ "breathalyser": "breathalyzer",
+ "breathalysers": "breathalyzers",
+ "breathalyses": "breathalyzes",
+ "breathalysing": "breathalyzing",
+ "brutalise": "brutalize",
+ "brutalised": "brutalized",
+ "brutalises": "brutalizes",
+ "brutalising": "brutalizing",
+ "busses": "buses",
+ "bussing": "busing",
+ "caesarean": "cesarean",
+ "caesareans": "cesareans",
+ "calibre": "caliber",
+ "calibres": "calibers",
+ "calliper": "caliper",
+ "callipers": "calipers",
+ "callisthenics": "calisthenics",
+ "canalise": "canalize",
+ "canalised": "canalized",
+ "canalises": "canalizes",
+ "canalising": "canalizing",
+ "cancelation": "cancellation",
+ "cancelations": "cancellations",
+ "cancelled": "canceled",
+ "cancelling": "canceling",
+ "candour": "candor",
+ "cannibalise": "cannibalize",
+ "cannibalised": "cannibalized",
+ "cannibalises": "cannibalizes",
+ "cannibalising": "cannibalizing",
+ "canonise": "canonize",
+ "canonised": "canonized",
+ "canonises": "canonizes",
+ "canonising": "canonizing",
+ "capitalise": "capitalize",
+ "capitalised": "capitalized",
+ "capitalises": "capitalizes",
+ "capitalising": "capitalizing",
+ "caramelise": "caramelize",
+ "caramelised": "caramelized",
+ "caramelises": "caramelizes",
+ "caramelising": "caramelizing",
+ "carbonise": "carbonize",
+ "carbonised": "carbonized",
+ "carbonises": "carbonizes",
+ "carbonising": "carbonizing",
+ "carolled": "caroled",
+ "carolling": "caroling",
+ "catalogue": "catalog",
+ "catalogued": "cataloged",
+ "catalogues": "catalogs",
+ "cataloguing": "cataloging",
+ "catalyse": "catalyze",
+ "catalysed": "catalyzed",
+ "catalyses": "catalyzes",
+ "catalysing": "catalyzing",
+ "categorise": "categorize",
+ "categorised": "categorized",
+ "categorises": "categorizes",
+ "categorising": "categorizing",
+ "cauterise": "cauterize",
+ "cauterised": "cauterized",
+ "cauterises": "cauterizes",
+ "cauterising": "cauterizing",
+ "cavilled": "caviled",
+ "cavilling": "caviling",
+ "centigramme": "centigram",
+ "centigrammes": "centigrams",
+ "centilitre": "centiliter",
+ "centilitres": "centiliters",
+ "centimetre": "centimeter",
+ "centimetres": "centimeters",
+ "centralise": "centralize",
+ "centralised": "centralized",
+ "centralises": "centralizes",
+ "centralising": "centralizing",
+ "centre": "center",
+ "centred": "centered",
+ "centrefold": "centerfold",
+ "centrefolds": "centerfolds",
+ "centrepiece": "centerpiece",
+ "centrepieces": "centerpieces",
+ "centres": "centers",
+ "channelled": "channeled",
+ "channelling": "channeling",
+ "characterise": "characterize",
+ "characterised": "characterized",
+ "characterises": "characterizes",
+ "characterising": "characterizing",
+ "cheque": "check",
+ "chequebook": "checkbook",
+ "chequebooks": "checkbooks",
+ "chequered": "checkered",
+ "cheques": "checks",
+ "chilli": "chili",
+ "chimaera": "chimera",
+ "chimaeras": "chimeras",
+ "chiselled": "chiseled",
+ "chiselling": "chiseling",
+ "circularise": "circularize",
+ "circularised": "circularized",
+ "circularises": "circularizes",
253
+ "circularising": "circularizing",
254
+ "civilise": "civilize",
255
+ "civilised": "civilized",
256
+ "civilises": "civilizes",
257
+ "civilising": "civilizing",
258
+ "clamour": "clamor",
259
+ "clamoured": "clamored",
260
+ "clamouring": "clamoring",
261
+ "clamours": "clamors",
262
+ "clangour": "clangor",
263
+ "clarinettist": "clarinetist",
264
+ "clarinettists": "clarinetists",
265
+ "collectivise": "collectivize",
266
+ "collectivised": "collectivized",
267
+ "collectivises": "collectivizes",
268
+ "collectivising": "collectivizing",
269
+ "colonisation": "colonization",
270
+ "colonise": "colonize",
271
+ "colonised": "colonized",
272
+ "coloniser": "colonizer",
273
+ "colonisers": "colonizers",
274
+ "colonises": "colonizes",
275
+ "colonising": "colonizing",
276
+ "colour": "color",
277
+ "colourant": "colorant",
278
+ "colourants": "colorants",
279
+ "coloured": "colored",
280
+ "coloureds": "coloreds",
281
+ "colourful": "colorful",
282
+ "colourfully": "colorfully",
283
+ "colouring": "coloring",
284
+ "colourize": "colorize",
285
+ "colourized": "colorized",
286
+ "colourizes": "colorizes",
287
+ "colourizing": "colorizing",
288
+ "colourless": "colorless",
289
+ "colours": "colors",
290
+ "commercialise": "commercialize",
291
+ "commercialised": "commercialized",
292
+ "commercialises": "commercializes",
293
+ "commercialising": "commercializing",
294
+ "compartmentalise": "compartmentalize",
295
+ "compartmentalised": "compartmentalized",
296
+ "compartmentalises": "compartmentalizes",
297
+ "compartmentalising": "compartmentalizing",
298
+ "computerise": "computerize",
299
+ "computerised": "computerized",
300
+ "computerises": "computerizes",
301
+ "computerising": "computerizing",
302
+ "conceptualise": "conceptualize",
303
+ "conceptualised": "conceptualized",
304
+ "conceptualises": "conceptualizes",
305
+ "conceptualising": "conceptualizing",
306
+ "connexion": "connection",
307
+ "connexions": "connections",
308
+ "contextualise": "contextualize",
309
+ "contextualised": "contextualized",
310
+ "contextualises": "contextualizes",
311
+ "contextualising": "contextualizing",
312
+ "cosier": "cozier",
313
+ "cosies": "cozies",
314
+ "cosiest": "coziest",
315
+ "cosily": "cozily",
316
+ "cosiness": "coziness",
317
+ "cosy": "cozy",
318
+ "councillor": "councilor",
319
+ "councillors": "councilors",
320
+ "counselled": "counseled",
321
+ "counselling": "counseling",
322
+ "counsellor": "counselor",
323
+ "counsellors": "counselors",
324
+ "crenelated": "crenellated",
325
+ "criminalise": "criminalize",
326
+ "criminalised": "criminalized",
327
+ "criminalises": "criminalizes",
328
+ "criminalising": "criminalizing",
329
+ "criticise": "criticize",
330
+ "criticised": "criticized",
331
+ "criticises": "criticizes",
332
+ "criticising": "criticizing",
333
+ "crueller": "crueler",
334
+ "cruellest": "cruelest",
335
+ "crystallisation": "crystallization",
336
+ "crystallise": "crystallize",
337
+ "crystallised": "crystallized",
338
+ "crystallises": "crystallizes",
339
+ "crystallising": "crystallizing",
340
+ "cudgelled": "cudgeled",
341
+ "cudgelling": "cudgeling",
342
+ "customise": "customize",
343
+ "customised": "customized",
344
+ "customises": "customizes",
345
+ "customising": "customizing",
346
+ "cypher": "cipher",
347
+ "cyphers": "ciphers",
348
+ "decentralisation": "decentralization",
349
+ "decentralise": "decentralize",
350
+ "decentralised": "decentralized",
351
+ "decentralises": "decentralizes",
352
+ "decentralising": "decentralizing",
353
+ "decriminalisation": "decriminalization",
354
+ "decriminalise": "decriminalize",
355
+ "decriminalised": "decriminalized",
356
+ "decriminalises": "decriminalizes",
357
+ "decriminalising": "decriminalizing",
358
+ "defence": "defense",
359
+ "defenceless": "defenseless",
360
+ "defences": "defenses",
361
+ "dehumanisation": "dehumanization",
362
+ "dehumanise": "dehumanize",
363
+ "dehumanised": "dehumanized",
364
+ "dehumanises": "dehumanizes",
365
+ "dehumanising": "dehumanizing",
366
+ "demeanour": "demeanor",
367
+ "demilitarisation": "demilitarization",
368
+ "demilitarise": "demilitarize",
369
+ "demilitarised": "demilitarized",
370
+ "demilitarises": "demilitarizes",
371
+ "demilitarising": "demilitarizing",
372
+ "demobilisation": "demobilization",
373
+ "demobilise": "demobilize",
374
+ "demobilised": "demobilized",
375
+ "demobilises": "demobilizes",
376
+ "demobilising": "demobilizing",
377
+ "democratisation": "democratization",
378
+ "democratise": "democratize",
379
+ "democratised": "democratized",
380
+ "democratises": "democratizes",
381
+ "democratising": "democratizing",
382
+ "demonise": "demonize",
383
+ "demonised": "demonized",
384
+ "demonises": "demonizes",
385
+ "demonising": "demonizing",
386
+ "demoralisation": "demoralization",
387
+ "demoralise": "demoralize",
388
+ "demoralised": "demoralized",
389
+ "demoralises": "demoralizes",
390
+ "demoralising": "demoralizing",
391
+ "denationalisation": "denationalization",
392
+ "denationalise": "denationalize",
393
+ "denationalised": "denationalized",
394
+ "denationalises": "denationalizes",
395
+ "denationalising": "denationalizing",
396
+ "deodorise": "deodorize",
397
+ "deodorised": "deodorized",
398
+ "deodorises": "deodorizes",
399
+ "deodorising": "deodorizing",
400
+ "depersonalise": "depersonalize",
401
+ "depersonalised": "depersonalized",
402
+ "depersonalises": "depersonalizes",
403
+ "depersonalising": "depersonalizing",
404
+ "deputise": "deputize",
405
+ "deputised": "deputized",
406
+ "deputises": "deputizes",
407
+ "deputising": "deputizing",
408
+ "desensitisation": "desensitization",
409
+ "desensitise": "desensitize",
410
+ "desensitised": "desensitized",
411
+ "desensitises": "desensitizes",
412
+ "desensitising": "desensitizing",
413
+ "destabilisation": "destabilization",
414
+ "destabilise": "destabilize",
415
+ "destabilised": "destabilized",
416
+ "destabilises": "destabilizes",
417
+ "destabilising": "destabilizing",
418
+ "dialled": "dialed",
419
+ "dialling": "dialing",
420
+ "dialogue": "dialog",
421
+ "dialogues": "dialogs",
422
+ "diarrhoea": "diarrhea",
423
+ "digitise": "digitize",
424
+ "digitised": "digitized",
425
+ "digitises": "digitizes",
426
+ "digitising": "digitizing",
427
+ "disc": "disk",
428
+ "discolour": "discolor",
429
+ "discoloured": "discolored",
430
+ "discolouring": "discoloring",
431
+ "discolours": "discolors",
432
+ "discs": "disks",
433
+ "disembowelled": "disemboweled",
434
+ "disembowelling": "disemboweling",
435
+ "disfavour": "disfavor",
436
+ "dishevelled": "disheveled",
437
+ "dishonour": "dishonor",
438
+ "dishonourable": "dishonorable",
439
+ "dishonourably": "dishonorably",
440
+ "dishonoured": "dishonored",
441
+ "dishonouring": "dishonoring",
442
+ "dishonours": "dishonors",
443
+ "disorganisation": "disorganization",
444
+ "disorganised": "disorganized",
445
+ "distil": "distill",
446
+ "distils": "distills",
447
+ "dramatisation": "dramatization",
448
+ "dramatisations": "dramatizations",
449
+ "dramatise": "dramatize",
450
+ "dramatised": "dramatized",
451
+ "dramatises": "dramatizes",
452
+ "dramatising": "dramatizing",
453
+ "draught": "draft",
454
+ "draughtboard": "draftboard",
455
+ "draughtboards": "draftboards",
456
+ "draughtier": "draftier",
457
+ "draughtiest": "draftiest",
458
+ "draughts": "drafts",
459
+ "draughtsman": "draftsman",
460
+ "draughtsmanship": "draftsmanship",
461
+ "draughtsmen": "draftsmen",
462
+ "draughtswoman": "draftswoman",
463
+ "draughtswomen": "draftswomen",
464
+ "draughty": "drafty",
465
+ "drivelled": "driveled",
466
+ "drivelling": "driveling",
467
+ "duelled": "dueled",
468
+ "duelling": "dueling",
469
+ "economise": "economize",
470
+ "economised": "economized",
471
+ "economises": "economizes",
472
+ "economising": "economizing",
473
+ "editorialise": "editorialize",
474
+ "editorialised": "editorialized",
475
+ "editorialises": "editorializes",
476
+ "editorialising": "editorializing",
477
+ "edoema": "edema",
478
+ "empathise": "empathize",
479
+ "empathised": "empathized",
480
+ "empathises": "empathizes",
481
+ "empathising": "empathizing",
482
+ "emphasise": "emphasize",
483
+ "emphasised": "emphasized",
484
+ "emphasises": "emphasizes",
485
+ "emphasising": "emphasizing",
486
+ "enamelled": "enameled",
487
+ "enamelling": "enameling",
488
+ "enamoured": "enamored",
489
+ "encyclopaedia": "encyclopedia",
490
+ "encyclopaedias": "encyclopedias",
491
+ "encyclopaedic": "encyclopedic",
492
+ "endeavour": "endeavor",
493
+ "endeavoured": "endeavored",
494
+ "endeavouring": "endeavoring",
495
+ "endeavours": "endeavors",
496
+ "energise": "energize",
497
+ "energised": "energized",
498
+ "energises": "energizes",
499
+ "energising": "energizing",
500
+ "enrol": "enroll",
501
+ "enrols": "enrolls",
502
+ "enthral": "enthrall",
503
+ "enthrals": "enthralls",
504
+ "epaulette": "epaulet",
505
+ "epaulettes": "epaulets",
506
+ "epicentre": "epicenter",
507
+ "epicentres": "epicenters",
508
+ "epilogue": "epilog",
509
+ "epilogues": "epilogs",
510
+ "epitomise": "epitomize",
511
+ "epitomised": "epitomized",
512
+ "epitomises": "epitomizes",
513
+ "epitomising": "epitomizing",
514
+ "equalisation": "equalization",
515
+ "equalise": "equalize",
516
+ "equalised": "equalized",
517
+ "equaliser": "equalizer",
518
+ "equalisers": "equalizers",
519
+ "equalises": "equalizes",
520
+ "equalising": "equalizing",
521
+ "eulogise": "eulogize",
522
+ "eulogised": "eulogized",
523
+ "eulogises": "eulogizes",
524
+ "eulogising": "eulogizing",
525
+ "evangelise": "evangelize",
526
+ "evangelised": "evangelized",
527
+ "evangelises": "evangelizes",
528
+ "evangelising": "evangelizing",
529
+ "exorcise": "exorcize",
530
+ "exorcised": "exorcized",
531
+ "exorcises": "exorcizes",
532
+ "exorcising": "exorcizing",
533
+ "extemporisation": "extemporization",
534
+ "extemporise": "extemporize",
535
+ "extemporised": "extemporized",
536
+ "extemporises": "extemporizes",
537
+ "extemporising": "extemporizing",
538
+ "externalisation": "externalization",
539
+ "externalisations": "externalizations",
540
+ "externalise": "externalize",
541
+ "externalised": "externalized",
542
+ "externalises": "externalizes",
543
+ "externalising": "externalizing",
544
+ "factorise": "factorize",
545
+ "factorised": "factorized",
546
+ "factorises": "factorizes",
547
+ "factorising": "factorizing",
548
+ "faecal": "fecal",
549
+ "faeces": "feces",
550
+ "familiarisation": "familiarization",
551
+ "familiarise": "familiarize",
552
+ "familiarised": "familiarized",
553
+ "familiarises": "familiarizes",
554
+ "familiarising": "familiarizing",
555
+ "fantasise": "fantasize",
556
+ "fantasised": "fantasized",
557
+ "fantasises": "fantasizes",
558
+ "fantasising": "fantasizing",
559
+ "favour": "favor",
560
+ "favourable": "favorable",
561
+ "favourably": "favorably",
562
+ "favoured": "favored",
563
+ "favouring": "favoring",
564
+ "favourite": "favorite",
565
+ "favourites": "favorites",
566
+ "favouritism": "favoritism",
567
+ "favours": "favors",
568
+ "feminise": "feminize",
569
+ "feminised": "feminized",
570
+ "feminises": "feminizes",
571
+ "feminising": "feminizing",
572
+ "fertilisation": "fertilization",
573
+ "fertilise": "fertilize",
574
+ "fertilised": "fertilized",
575
+ "fertiliser": "fertilizer",
576
+ "fertilisers": "fertilizers",
577
+ "fertilises": "fertilizes",
578
+ "fertilising": "fertilizing",
579
+ "fervour": "fervor",
580
+ "fibre": "fiber",
581
+ "fibreglass": "fiberglass",
582
+ "fibres": "fibers",
583
+ "fictionalisation": "fictionalization",
584
+ "fictionalisations": "fictionalizations",
585
+ "fictionalise": "fictionalize",
586
+ "fictionalised": "fictionalized",
587
+ "fictionalises": "fictionalizes",
588
+ "fictionalising": "fictionalizing",
589
+ "fillet": "filet",
590
+ "filleted": "fileted",
591
+ "filleting": "fileting",
592
+ "fillets": "filets",
593
+ "finalisation": "finalization",
594
+ "finalise": "finalize",
595
+ "finalised": "finalized",
596
+ "finalises": "finalizes",
597
+ "finalising": "finalizing",
598
+ "flautist": "flutist",
599
+ "flautists": "flutists",
600
+ "flavour": "flavor",
601
+ "flavoured": "flavored",
602
+ "flavouring": "flavoring",
603
+ "flavourings": "flavorings",
604
+ "flavourless": "flavorless",
605
+ "flavours": "flavors",
606
+ "flavoursome": "flavorsome",
607
+ "flyer / flier": "flier / flyer",
608
+ "foetal": "fetal",
609
+ "foetid": "fetid",
610
+ "foetus": "fetus",
611
+ "foetuses": "fetuses",
612
+ "formalisation": "formalization",
613
+ "formalise": "formalize",
614
+ "formalised": "formalized",
615
+ "formalises": "formalizes",
616
+ "formalising": "formalizing",
617
+ "fossilisation": "fossilization",
618
+ "fossilise": "fossilize",
619
+ "fossilised": "fossilized",
620
+ "fossilises": "fossilizes",
621
+ "fossilising": "fossilizing",
622
+ "fraternisation": "fraternization",
623
+ "fraternise": "fraternize",
624
+ "fraternised": "fraternized",
625
+ "fraternises": "fraternizes",
626
+ "fraternising": "fraternizing",
627
+ "fulfil": "fulfill",
628
+ "fulfilment": "fulfillment",
629
+ "fulfils": "fulfills",
630
+ "funnelled": "funneled",
631
+ "funnelling": "funneling",
632
+ "gage": "gauge",
633
+ "gaged": "gauged",
634
+ "gages": "gauges",
635
+ "gaging": "gauging",
636
+ "galvanise": "galvanize",
637
+ "galvanised": "galvanized",
638
+ "galvanises": "galvanizes",
639
+ "galvanising": "galvanizing",
640
+ "gambolled": "gamboled",
641
+ "gambolling": "gamboling",
642
+ "gaol": "jail",
643
+ "gaolbird": "jailbird",
644
+ "gaolbirds": "jailbirds",
645
+ "gaolbreak": "jailbreak",
646
+ "gaolbreaks": "jailbreaks",
647
+ "gaoled": "jailed",
648
+ "gaoler": "jailer",
649
+ "gaolers": "jailers",
650
+ "gaoling": "jailing",
651
+ "gaols": "jails",
652
+ "gasses": "gases",
653
+ "generalisation": "generalization",
654
+ "generalisations": "generalizations",
655
+ "generalise": "generalize",
656
+ "generalised": "generalized",
657
+ "generalises": "generalizes",
658
+ "generalising": "generalizing",
659
+ "ghettoise": "ghettoize",
660
+ "ghettoised": "ghettoized",
661
+ "ghettoises": "ghettoizes",
662
+ "ghettoising": "ghettoizing",
663
+ "gipsies": "gypsies",
664
+ "glamor": "glamour",
665
+ "glamorise": "glamorize",
666
+ "glamorised": "glamorized",
667
+ "glamorises": "glamorizes",
668
+ "glamorising": "glamorizing",
669
+ "globalisation": "globalization",
670
+ "globalise": "globalize",
671
+ "globalised": "globalized",
672
+ "globalises": "globalizes",
673
+ "globalising": "globalizing",
674
+ "glueing": "gluing",
675
+ "goitre": "goiter",
676
+ "goitres": "goiters",
677
+ "gonorrhoea": "gonorrhea",
678
+ "gramme": "gram",
679
+ "grammes": "grams",
680
+ "gravelled": "graveled",
681
+ "grey": "gray",
682
+ "greyed": "grayed",
683
+ "greying": "graying",
684
+ "greyish": "grayish",
685
+ "greyness": "grayness",
686
+ "greys": "grays",
687
+ "grovelled": "groveled",
688
+ "grovelling": "groveling",
689
+ "groyne": "groin",
690
+ "groynes": "groins",
691
+ "gruelling": "grueling",
692
+ "gruellingly": "gruelingly",
693
+ "gryphon": "griffin",
694
+ "gryphons": "griffins",
695
+ "gynaecological": "gynecological",
696
+ "gynaecologist": "gynecologist",
697
+ "gynaecologists": "gynecologists",
698
+ "gynaecology": "gynecology",
699
+ "haematological": "hematological",
700
+ "haematologist": "hematologist",
701
+ "haematologists": "hematologists",
702
+ "haematology": "hematology",
703
+ "haemoglobin": "hemoglobin",
704
+ "haemophilia": "hemophilia",
705
+ "haemophiliac": "hemophiliac",
706
+ "haemophiliacs": "hemophiliacs",
707
+ "haemorrhage": "hemorrhage",
708
+ "haemorrhaged": "hemorrhaged",
709
+ "haemorrhages": "hemorrhages",
710
+ "haemorrhaging": "hemorrhaging",
711
+ "haemorrhoids": "hemorrhoids",
712
+ "harbour": "harbor",
713
+ "harboured": "harbored",
714
+ "harbouring": "harboring",
715
+ "harbours": "harbors",
716
+ "harmonisation": "harmonization",
717
+ "harmonise": "harmonize",
718
+ "harmonised": "harmonized",
719
+ "harmonises": "harmonizes",
720
+ "harmonising": "harmonizing",
721
+ "homoeopath": "homeopath",
722
+ "homoeopathic": "homeopathic",
723
+ "homoeopaths": "homeopaths",
724
+ "homoeopathy": "homeopathy",
725
+ "homogenise": "homogenize",
726
+ "homogenised": "homogenized",
727
+ "homogenises": "homogenizes",
728
+ "homogenising": "homogenizing",
729
+ "honour": "honor",
730
+ "honourable": "honorable",
731
+ "honourably": "honorably",
732
+ "honoured": "honored",
733
+ "honouring": "honoring",
734
+ "honours": "honors",
735
+ "hospitalisation": "hospitalization",
736
+ "hospitalise": "hospitalize",
737
+ "hospitalised": "hospitalized",
738
+ "hospitalises": "hospitalizes",
739
+ "hospitalising": "hospitalizing",
740
+ "humanise": "humanize",
741
+ "humanised": "humanized",
742
+ "humanises": "humanizes",
743
+ "humanising": "humanizing",
744
+ "humour": "humor",
745
+ "humoured": "humored",
746
+ "humouring": "humoring",
747
+ "humourless": "humorless",
748
+ "humours": "humors",
749
+ "hybridise": "hybridize",
750
+ "hybridised": "hybridized",
751
+ "hybridises": "hybridizes",
752
+ "hybridising": "hybridizing",
753
+ "hypnotise": "hypnotize",
754
+ "hypnotised": "hypnotized",
755
+ "hypnotises": "hypnotizes",
756
+ "hypnotising": "hypnotizing",
757
+ "hypothesise": "hypothesize",
758
+ "hypothesised": "hypothesized",
759
+ "hypothesises": "hypothesizes",
760
+ "hypothesising": "hypothesizing",
761
+ "idealisation": "idealization",
762
+ "idealise": "idealize",
763
+ "idealised": "idealized",
764
+ "idealises": "idealizes",
765
+ "idealising": "idealizing",
766
+ "idolise": "idolize",
767
+ "idolised": "idolized",
768
+ "idolises": "idolizes",
769
+ "idolising": "idolizing",
770
+ "immobilisation": "immobilization",
771
+ "immobilise": "immobilize",
772
+ "immobilised": "immobilized",
773
+ "immobiliser": "immobilizer",
774
+ "immobilisers": "immobilizers",
775
+ "immobilises": "immobilizes",
776
+ "immobilising": "immobilizing",
777
+ "immortalise": "immortalize",
778
+ "immortalised": "immortalized",
779
+ "immortalises": "immortalizes",
780
+ "immortalising": "immortalizing",
781
+ "immunisation": "immunization",
782
+ "immunise": "immunize",
783
+ "immunised": "immunized",
784
+ "immunises": "immunizes",
785
+ "immunising": "immunizing",
786
+ "impanelled": "impaneled",
787
+ "impanelling": "impaneling",
788
+ "imperilled": "imperiled",
789
+ "imperilling": "imperiling",
790
+ "individualise": "individualize",
791
+ "individualised": "individualized",
792
+ "individualises": "individualizes",
793
+ "individualising": "individualizing",
794
+ "industrialise": "industrialize",
795
+ "industrialised": "industrialized",
796
+ "industrialises": "industrializes",
797
+ "industrialising": "industrializing",
798
+ "inflexion": "inflection",
799
+ "inflexions": "inflections",
800
+ "initialise": "initialize",
801
+ "initialised": "initialized",
802
+ "initialises": "initializes",
803
+ "initialising": "initializing",
804
+ "initialled": "initialed",
805
+ "initialling": "initialing",
806
+ "instal": "install",
807
+ "instalment": "installment",
808
+ "instalments": "installments",
809
+ "instals": "installs",
810
+ "instil": "instill",
811
+ "instils": "instills",
812
+ "institutionalisation": "institutionalization",
813
+ "institutionalise": "institutionalize",
814
+ "institutionalised": "institutionalized",
815
+ "institutionalises": "institutionalizes",
816
+ "institutionalising": "institutionalizing",
817
+ "intellectualise": "intellectualize",
818
+ "intellectualised": "intellectualized",
819
+ "intellectualises": "intellectualizes",
820
+ "intellectualising": "intellectualizing",
821
+ "internalisation": "internalization",
822
+ "internalise": "internalize",
823
+ "internalised": "internalized",
824
+ "internalises": "internalizes",
825
+ "internalising": "internalizing",
826
+ "internationalisation": "internationalization",
827
+ "internationalise": "internationalize",
828
+ "internationalised": "internationalized",
829
+ "internationalises": "internationalizes",
830
+ "internationalising": "internationalizing",
831
+ "ionisation": "ionization",
832
+ "ionise": "ionize",
833
+ "ionised": "ionized",
834
+ "ioniser": "ionizer",
835
+ "ionisers": "ionizers",
836
+ "ionises": "ionizes",
837
+ "ionising": "ionizing",
838
+ "italicise": "italicize",
839
+ "italicised": "italicized",
840
+ "italicises": "italicizes",
841
+ "italicising": "italicizing",
842
+ "itemise": "itemize",
843
+ "itemised": "itemized",
844
+ "itemises": "itemizes",
845
+ "itemising": "itemizing",
846
+ "jeopardise": "jeopardize",
847
+ "jeopardised": "jeopardized",
848
+ "jeopardises": "jeopardizes",
849
+ "jeopardising": "jeopardizing",
850
+ "jewelled": "jeweled",
851
+ "jeweller": "jeweler",
852
+ "jewellers": "jewelers",
853
+ "jewellery": "jewelry",
854
+ "judgement": "judgment",
855
+ "kilogramme": "kilogram",
856
+ "kilogrammes": "kilograms",
857
+ "kilometre": "kilometer",
858
+ "kilometres": "kilometers",
859
+ "labelled": "labeled",
860
+ "labelling": "labeling",
861
+ "labour": "labor",
862
+ "laboured": "labored",
863
+ "labourer": "laborer",
864
+ "labourers": "laborers",
865
+ "labouring": "laboring",
866
+ "labours": "labors",
867
+ "lacklustre": "lackluster",
868
+ "legalisation": "legalization",
869
+ "legalise": "legalize",
870
+ "legalised": "legalized",
871
+ "legalises": "legalizes",
872
+ "legalising": "legalizing",
873
+ "legitimise": "legitimize",
874
+ "legitimised": "legitimized",
875
+ "legitimises": "legitimizes",
876
+ "legitimising": "legitimizing",
877
+ "leukaemia": "leukemia",
878
+ "levelled": "leveled",
879
+ "leveller": "leveler",
880
+ "levellers": "levelers",
881
+ "levelling": "leveling",
882
+ "libelled": "libeled",
883
+ "libelling": "libeling",
884
+ "libellous": "libelous",
885
+ "liberalisation": "liberalization",
886
+ "liberalise": "liberalize",
887
+ "liberalised": "liberalized",
888
+ "liberalises": "liberalizes",
889
+ "liberalising": "liberalizing",
890
+ "licence": "license",
891
+ "licenced": "licensed",
892
+ "licences": "licenses",
893
+ "licencing": "licensing",
894
+ "likeable": "likable",
895
+ "lionisation": "lionization",
896
+ "lionise": "lionize",
897
+ "lionised": "lionized",
898
+ "lionises": "lionizes",
899
+ "lionising": "lionizing",
900
+ "liquidise": "liquidize",
901
+ "liquidised": "liquidized",
902
+ "liquidiser": "liquidizer",
903
+ "liquidisers": "liquidizers",
904
+ "liquidises": "liquidizes",
905
+ "liquidising": "liquidizing",
906
+ "litre": "liter",
907
+ "litres": "liters",
908
+ "localise": "localize",
909
+ "localised": "localized",
910
+ "localises": "localizes",
911
+ "localising": "localizing",
912
+ "louvre": "louver",
913
+ "louvred": "louvered",
914
+ "louvres": "louvers",
915
+ "lustre": "luster",
916
+ "magnetise": "magnetize",
917
+ "magnetised": "magnetized",
918
+ "magnetises": "magnetizes",
919
+ "magnetising": "magnetizing",
920
+ "manoeuvrability": "maneuverability",
921
+ "manoeuvrable": "maneuverable",
922
+ "manoeuvre": "maneuver",
923
+ "manoeuvred": "maneuvered",
924
+ "manoeuvres": "maneuvers",
925
+ "manoeuvring": "maneuvering",
926
+ "manoeuvrings": "maneuverings",
927
+ "marginalisation": "marginalization",
928
+ "marginalise": "marginalize",
929
+ "marginalised": "marginalized",
930
+ "marginalises": "marginalizes",
931
+ "marginalising": "marginalizing",
932
+ "marshalled": "marshaled",
933
+ "marshalling": "marshaling",
934
+ "marvelled": "marveled",
935
+ "marvelling": "marveling",
936
+ "marvellous": "marvelous",
937
+ "marvellously": "marvelously",
938
+ "materialisation": "materialization",
939
+ "materialise": "materialize",
940
+ "materialised": "materialized",
941
+ "materialises": "materializes",
942
+ "materialising": "materializing",
943
+ "maximisation": "maximization",
944
+ "maximise": "maximize",
945
+ "maximised": "maximized",
946
+ "maximises": "maximizes",
947
+ "maximising": "maximizing",
948
+ "meagre": "meager",
949
+ "mechanisation": "mechanization",
950
+ "mechanise": "mechanize",
951
+ "mechanised": "mechanized",
952
+ "mechanises": "mechanizes",
953
+ "mechanising": "mechanizing",
954
+ "mediaeval": "medieval",
955
+ "memorialise": "memorialize",
956
+ "memorialised": "memorialized",
957
+ "memorialises": "memorializes",
958
+ "memorialising": "memorializing",
959
+ "memorise": "memorize",
960
+ "memorised": "memorized",
961
+ "memorises": "memorizes",
962
+ "memorising": "memorizing",
963
+ "mesmerise": "mesmerize",
964
+ "mesmerised": "mesmerized",
965
+ "mesmerises": "mesmerizes",
966
+ "mesmerising": "mesmerizing",
967
+ "metabolise": "metabolize",
968
+ "metabolised": "metabolized",
969
+ "metabolises": "metabolizes",
970
+ "metabolising": "metabolizing",
971
+ "metre": "meter",
972
+ "metres": "meters",
973
+ "mhm": "hmm",
974
+ "micrometre": "micrometer",
975
+ "micrometres": "micrometers",
976
+ "militarise": "militarize",
977
+ "militarised": "militarized",
978
+ "militarises": "militarizes",
979
+ "militarising": "militarizing",
980
+ "milligramme": "milligram",
981
+ "milligrammes": "milligrams",
982
+ "millilitre": "milliliter",
983
+ "millilitres": "milliliters",
984
+ "millimetre": "millimeter",
985
+ "millimetres": "millimeters",
986
+ "miniaturisation": "miniaturization",
987
+ "miniaturise": "miniaturize",
988
+ "miniaturised": "miniaturized",
989
+ "miniaturises": "miniaturizes",
990
+ "miniaturising": "miniaturizing",
991
+ "minibusses": "minibuses",
992
+ "minimise": "minimize",
993
+ "minimised": "minimized",
994
+ "minimises": "minimizes",
995
+ "minimising": "minimizing",
996
+ "misbehaviour": "misbehavior",
997
+ "misdemeanour": "misdemeanor",
998
+ "misdemeanours": "misdemeanors",
999
+ "misspelt": "misspelled",
1000
+ "mitre": "miter",
1001
+ "mitres": "miters",
1002
+ "mm": "hmm",
1003
+ "mmm": "hmm",
1004
+ "mobilisation": "mobilization",
1005
+ "mobilise": "mobilize",
1006
+ "mobilised": "mobilized",
1007
+ "mobilises": "mobilizes",
1008
+ "mobilising": "mobilizing",
1009
+ "modelled": "modeled",
1010
+ "modeller": "modeler",
1011
+ "modellers": "modelers",
1012
+ "modelling": "modeling",
1013
+ "modernise": "modernize",
1014
+ "modernised": "modernized",
1015
+ "modernises": "modernizes",
1016
+ "modernising": "modernizing",
1017
+ "moisturise": "moisturize",
1018
+ "moisturised": "moisturized",
1019
+ "moisturiser": "moisturizer",
1020
+ "moisturisers": "moisturizers",
1021
+ "moisturises": "moisturizes",
1022
+ "moisturising": "moisturizing",
1023
+ "monologue": "monolog",
1024
+ "monologues": "monologs",
1025
+ "monopolisation": "monopolization",
1026
+ "monopolise": "monopolize",
1027
+ "monopolised": "monopolized",
1028
+ "monopolises": "monopolizes",
1029
+ "monopolising": "monopolizing",
1030
+ "moralise": "moralize",
1031
+ "moralised": "moralized",
1032
+ "moralises": "moralizes",
1033
+ "moralising": "moralizing",
1034
+ "motorised": "motorized",
1035
+ "mould": "mold",
1036
+ "moulded": "molded",
1037
+ "moulder": "molder",
1038
+ "mouldered": "moldered",
1039
+ "mouldering": "moldering",
1040
+ "moulders": "molders",
1041
+ "mouldier": "moldier",
1042
+ "mouldiest": "moldiest",
1043
+ "moulding": "molding",
1044
+ "mouldings": "moldings",
1045
+ "moulds": "molds",
1046
+ "mouldy": "moldy",
1047
+ "moult": "molt",
1048
+ "moulted": "molted",
1049
+ "moulting": "molting",
1050
+ "moults": "molts",
1051
+ "moustache": "mustache",
1052
+ "moustached": "mustached",
1053
+ "moustaches": "mustaches",
1054
+ "moustachioed": "mustachioed",
1055
+ "multicoloured": "multicolored",
1056
+ "nationalisation": "nationalization",
1057
+ "nationalisations": "nationalizations",
1058
+ "nationalise": "nationalize",
1059
+ "nationalised": "nationalized",
1060
+ "nationalises": "nationalizes",
1061
+ "nationalising": "nationalizing",
1062
+ "naturalisation": "naturalization",
1063
+ "naturalise": "naturalize",
1064
+ "naturalised": "naturalized",
1065
+ "naturalises": "naturalizes",
1066
+ "naturalising": "naturalizing",
1067
+ "neighbour": "neighbor",
1068
+ "neighbourhood": "neighborhood",
1069
+ "neighbourhoods": "neighborhoods",
1070
+ "neighbouring": "neighboring",
1071
+ "neighbourliness": "neighborliness",
1072
+ "neighbourly": "neighborly",
1073
+ "neighbours": "neighbors",
1074
+ "neutralisation": "neutralization",
1075
+ "neutralise": "neutralize",
1076
+ "neutralised": "neutralized",
1077
+ "neutralises": "neutralizes",
1078
+ "neutralising": "neutralizing",
1079
+ "normalisation": "normalization",
1080
+ "normalise": "normalize",
1081
+ "normalised": "normalized",
1082
+ "normalises": "normalizes",
1083
+ "normalising": "normalizing",
1084
+ "odour": "odor",
1085
+ "odourless": "odorless",
1086
+ "odours": "odors",
1087
+ "oesophagus": "esophagus",
1088
+ "oesophaguses": "esophaguses",
1089
+ "oestrogen": "estrogen",
1090
+ "offence": "offense",
1091
+ "offences": "offenses",
1092
+ "omelette": "omelet",
1093
+ "omelettes": "omelets",
1094
+ "optimise": "optimize",
1095
+ "optimised": "optimized",
1096
+ "optimises": "optimizes",
1097
+ "optimising": "optimizing",
1098
+ "organisation": "organization",
1099
+ "organisational": "organizational",
1100
+ "organisations": "organizations",
1101
+ "organise": "organize",
1102
+ "organised": "organized",
1103
+ "organiser": "organizer",
1104
+ "organisers": "organizers",
1105
+ "organises": "organizes",
1106
+ "organising": "organizing",
1107
+ "orthopaedic": "orthopedic",
1108
+ "orthopaedics": "orthopedics",
1109
+ "ostracise": "ostracize",
1110
+ "ostracised": "ostracized",
1111
+ "ostracises": "ostracizes",
1112
+ "ostracising": "ostracizing",
1113
+ "outmanoeuvre": "outmaneuver",
1114
+ "outmanoeuvred": "outmaneuvered",
1115
+ "outmanoeuvres": "outmaneuvers",
1116
+ "outmanoeuvring": "outmaneuvering",
1117
+ "overemphasise": "overemphasize",
1118
+ "overemphasised": "overemphasized",
1119
+ "overemphasises": "overemphasizes",
1120
+ "overemphasising": "overemphasizing",
1121
+ "oxidisation": "oxidization",
1122
+ "oxidise": "oxidize",
1123
+ "oxidised": "oxidized",
1124
+ "oxidises": "oxidizes",
1125
+ "oxidising": "oxidizing",
1126
+ "paederast": "pederast",
1127
+ "paederasts": "pederasts",
1128
+ "paediatric": "pediatric",
1129
+ "paediatrician": "pediatrician",
1130
+ "paediatricians": "pediatricians",
1131
+ "paediatrics": "pediatrics",
1132
+ "paedophile": "pedophile",
1133
+ "paedophiles": "pedophiles",
1134
+ "paedophilia": "pedophilia",
1135
+ "palaeolithic": "paleolithic",
1136
+ "palaeontologist": "paleontologist",
1137
+ "palaeontologists": "paleontologists",
1138
+ "palaeontology": "paleontology",
1139
+ "panelled": "paneled",
1140
+ "panelling": "paneling",
1141
+ "panellist": "panelist",
1142
+ "panellists": "panelists",
1143
+ "paralyse": "paralyze",
1144
+ "paralysed": "paralyzed",
1145
+ "paralyses": "paralyzes",
1146
+ "paralysing": "paralyzing",
1147
+ "parcelled": "parceled",
1148
+ "parcelling": "parceling",
1149
+ "parlour": "parlor",
1150
+ "parlours": "parlors",
1151
+ "particularise": "particularize",
1152
+ "particularised": "particularized",
1153
+ "particularises": "particularizes",
1154
+ "particularising": "particularizing",
1155
+ "passivisation": "passivization",
1156
+ "passivise": "passivize",
1157
+ "passivised": "passivized",
1158
+ "passivises": "passivizes",
1159
+ "passivising": "passivizing",
1160
+ "pasteurisation": "pasteurization",
1161
+ "pasteurise": "pasteurize",
1162
+ "pasteurised": "pasteurized",
1163
+ "pasteurises": "pasteurizes",
1164
+ "pasteurising": "pasteurizing",
1165
+ "patronise": "patronize",
1166
+ "patronised": "patronized",
1167
+ "patronises": "patronizes",
1168
+ "patronising": "patronizing",
1169
+ "patronisingly": "patronizingly",
1170
+ "pedalled": "pedaled",
1171
+ "pedalling": "pedaling",
1172
+ "pedestrianisation": "pedestrianization",
1173
+ "pedestrianise": "pedestrianize",
1174
+ "pedestrianised": "pedestrianized",
1175
+ "pedestrianises": "pedestrianizes",
1176
+ "pedestrianising": "pedestrianizing",
1177
+ "penalise": "penalize",
1178
+ "penalised": "penalized",
1179
+ "penalises": "penalizes",
1180
+ "penalising": "penalizing",
1181
+ "pencilled": "penciled",
1182
+ "pencilling": "penciling",
1183
+ "personalise": "personalize",
1184
+ "personalised": "personalized",
1185
+ "personalises": "personalizes",
1186
+ "personalising": "personalizing",
1187
+ "pharmacopoeia": "pharmacopeia",
1188
+ "pharmacopoeias": "pharmacopeias",
1189
+ "philosophise": "philosophize",
1190
+ "philosophised": "philosophized",
1191
+ "philosophises": "philosophizes",
1192
+ "philosophising": "philosophizing",
1193
+ "philtre": "filter",
1194
+ "philtres": "filters",
1195
+ "phoney": "phony",
1196
+ "plagiarise": "plagiarize",
1197
+ "plagiarised": "plagiarized",
1198
+ "plagiarises": "plagiarizes",
1199
+ "plagiarising": "plagiarizing",
1200
+ "plough": "plow",
1201
+ "ploughed": "plowed",
1202
+ "ploughing": "plowing",
1203
+ "ploughman": "plowman",
1204
+ "ploughmen": "plowmen",
1205
+ "ploughs": "plows",
1206
+ "ploughshare": "plowshare",
1207
+ "ploughshares": "plowshares",
1208
+ "polarisation": "polarization",
1209
+ "polarise": "polarize",
1210
+ "polarised": "polarized",
1211
+ "polarises": "polarizes",
1212
+ "polarising": "polarizing",
1213
+ "politicisation": "politicization",
1214
+ "politicise": "politicize",
1215
+ "politicised": "politicized",
1216
+ "politicises": "politicizes",
1217
+ "politicising": "politicizing",
1218
+ "popularisation": "popularization",
1219
+ "popularise": "popularize",
1220
+ "popularised": "popularized",
1221
+ "popularises": "popularizes",
1222
+ "popularising": "popularizing",
1223
+ "pouffe": "pouf",
1224
+ "pouffes": "poufs",
1225
+ "practise": "practice",
1226
+ "practised": "practiced",
1227
+ "practises": "practices",
1228
+ "practising": "practicing",
1229
+ "praesidium": "presidium",
1230
+ "praesidiums": "presidiums",
1231
+ "pressurisation": "pressurization",
1232
+ "pressurise": "pressurize",
1233
+ "pressurised": "pressurized",
1234
+ "pressurises": "pressurizes",
1235
+ "pressurising": "pressurizing",
1236
+ "pretence": "pretense",
1237
+ "pretences": "pretenses",
1238
+ "primaeval": "primeval",
1239
+ "prioritisation": "prioritization",
1240
+ "prioritise": "prioritize",
1241
+ "prioritised": "prioritized",
1242
+ "prioritises": "prioritizes",
1243
+ "prioritising": "prioritizing",
1244
+ "privatisation": "privatization",
1245
+ "privatisations": "privatizations",
1246
+ "privatise": "privatize",
1247
+ "privatised": "privatized",
1248
+ "privatises": "privatizes",
1249
+ "privatising": "privatizing",
1250
+ "professionalisation": "professionalization",
1251
+ "professionalise": "professionalize",
1252
+ "professionalised": "professionalized",
1253
+ "professionalises": "professionalizes",
1254
+ "professionalising": "professionalizing",
1255
+ "programme": "program",
1256
+ "programmes": "programs",
1257
+ "prologue": "prolog",
1258
+ "prologues": "prologs",
1259
+ "propagandise": "propagandize",
1260
+ "propagandised": "propagandized",
1261
+ "propagandises": "propagandizes",
1262
+ "propagandising": "propagandizing",
1263
+ "proselytise": "proselytize",
1264
+ "proselytised": "proselytized",
1265
+ "proselytiser": "proselytizer",
1266
+ "proselytisers": "proselytizers",
1267
+ "proselytises": "proselytizes",
1268
+ "proselytising": "proselytizing",
1269
+ "psychoanalyse": "psychoanalyze",
1270
+ "psychoanalysed": "psychoanalyzed",
1271
+ "psychoanalyses": "psychoanalyzes",
1272
+ "psychoanalysing": "psychoanalyzing",
1273
+ "publicise": "publicize",
1274
+ "publicised": "publicized",
1275
+ "publicises": "publicizes",
1276
+ "publicising": "publicizing",
1277
+ "pulverisation": "pulverization",
1278
+ "pulverise": "pulverize",
1279
+ "pulverised": "pulverized",
1280
+ "pulverises": "pulverizes",
1281
+ "pulverising": "pulverizing",
1282
+ "pummelled": "pummel",
1283
+ "pummelling": "pummeled",
1284
+ "pyjama": "pajama",
1285
+ "pyjamas": "pajamas",
1286
+ "pzazz": "pizzazz",
1287
+ "quarrelled": "quarreled",
1288
+ "quarrelling": "quarreling",
1289
+ "radicalise": "radicalize",
1290
+ "radicalised": "radicalized",
1291
+ "radicalises": "radicalizes",
1292
+ "radicalising": "radicalizing",
1293
+ "rancour": "rancor",
1294
+ "randomise": "randomize",
1295
+ "randomised": "randomized",
1296
+ "randomises": "randomizes",
1297
+ "randomising": "randomizing",
1298
+ "rationalisation": "rationalization",
1299
+ "rationalisations": "rationalizations",
1300
+ "rationalise": "rationalize",
1301
+ "rationalised": "rationalized",
1302
+ "rationalises": "rationalizes",
1303
+ "rationalising": "rationalizing",
1304
+ "ravelled": "raveled",
1305
+ "ravelling": "raveling",
1306
+ "realisable": "realizable",
1307
+ "realisation": "realization",
1308
+ "realisations": "realizations",
1309
+ "realise": "realize",
1310
+ "realised": "realized",
1311
+ "realises": "realizes",
1312
+ "realising": "realizing",
1313
+ "recognisable": "recognizable",
1314
+ "recognisably": "recognizably",
1315
+ "recognisance": "recognizance",
1316
+ "recognise": "recognize",
1317
+ "recognised": "recognized",
1318
+ "recognises": "recognizes",
1319
+ "recognising": "recognizing",
1320
+ "reconnoitre": "reconnoiter",
1321
+ "reconnoitred": "reconnoitered",
1322
+ "reconnoitres": "reconnoiters",
1323
+ "reconnoitring": "reconnoitering",
1324
+ "refuelled": "refueled",
1325
+ "refuelling": "refueling",
1326
+ "regularisation": "regularization",
1327
+ "regularise": "regularize",
1328
+ "regularised": "regularized",
1329
+ "regularises": "regularizes",
1330
+ "regularising": "regularizing",
1331
+ "remodelled": "remodeled",
1332
+ "remodelling": "remodeling",
1333
+ "remould": "remold",
1334
+ "remoulded": "remolded",
1335
+ "remoulding": "remolding",
1336
+ "remoulds": "remolds",
1337
+ "reorganisation": "reorganization",
1338
+ "reorganisations": "reorganizations",
1339
+ "reorganise": "reorganize",
1340
+ "reorganised": "reorganized",
1341
+ "reorganises": "reorganizes",
1342
+ "reorganising": "reorganizing",
1343
+ "revelled": "reveled",
1344
+ "reveller": "reveler",
1345
+ "revellers": "revelers",
1346
+ "revelling": "reveling",
1347
+ "revitalise": "revitalize",
1348
+ "revitalised": "revitalized",
1349
+ "revitalises": "revitalizes",
1350
+ "revitalising": "revitalizing",
1351
+ "revolutionise": "revolutionize",
1352
+ "revolutionised": "revolutionized",
1353
+ "revolutionises": "revolutionizes",
1354
+ "revolutionising": "revolutionizing",
1355
+ "rhapsodise": "rhapsodize",
1356
+ "rhapsodised": "rhapsodized",
1357
+ "rhapsodises": "rhapsodizes",
1358
+ "rhapsodising": "rhapsodizing",
1359
+ "rigour": "rigor",
1360
+ "rigours": "rigors",
1361
+ "ritualised": "ritualized",
1362
+ "rivalled": "rivaled",
1363
+ "rivalling": "rivaling",
1364
+ "romanticise": "romanticize",
1365
+ "romanticised": "romanticized",
1366
+ "romanticises": "romanticizes",
1367
+ "romanticising": "romanticizing",
1368
+ "rumour": "rumor",
1369
+ "rumoured": "rumored",
1370
+ "rumours": "rumors",
1371
+ "sabre": "saber",
1372
+ "sabres": "sabers",
1373
+ "saltpetre": "saltpeter",
1374
+ "sanitise": "sanitize",
1375
+ "sanitised": "sanitized",
1376
+ "sanitises": "sanitizes",
1377
+ "sanitising": "sanitizing",
1378
+ "satirise": "satirize",
1379
+ "satirised": "satirized",
1380
+ "satirises": "satirizes",
1381
+ "satirising": "satirizing",
1382
+ "saviour": "savior",
1383
+ "saviours": "saviors",
1384
+ "savour": "savor",
1385
+ "savoured": "savored",
1386
+ "savouries": "savories",
1387
+ "savouring": "savoring",
1388
+ "savours": "savors",
1389
+ "savoury": "savory",
1390
+ "scandalise": "scandalize",
1391
+ "scandalised": "scandalized",
1392
+ "scandalises": "scandalizes",
1393
+ "scandalising": "scandalizing",
1394
+ "sceptic": "skeptic",
1395
+ "sceptical": "skeptical",
1396
+ "sceptically": "skeptically",
1397
+ "scepticism": "skepticism",
1398
+ "sceptics": "skeptics",
1399
+ "sceptre": "scepter",
1400
+ "sceptres": "scepters",
1401
+ "scrutinise": "scrutinize",
1402
+ "scrutinised": "scrutinized",
1403
+ "scrutinises": "scrutinizes",
1404
+ "scrutinising": "scrutinizing",
1405
+ "secularisation": "secularization",
1406
+ "secularise": "secularize",
1407
+ "secularised": "secularized",
1408
+ "secularises": "secularizes",
1409
+ "secularising": "secularizing",
1410
+ "sensationalise": "sensationalize",
1411
+ "sensationalised": "sensationalized",
1412
+ "sensationalises": "sensationalizes",
1413
+ "sensationalising": "sensationalizing",
1414
+ "sensitise": "sensitize",
1415
+ "sensitised": "sensitized",
1416
+ "sensitises": "sensitizes",
1417
+ "sensitising": "sensitizing",
1418
+ "sentimentalise": "sentimentalize",
1419
+ "sentimentalised": "sentimentalized",
1420
+ "sentimentalises": "sentimentalizes",
1421
+ "sentimentalising": "sentimentalizing",
1422
+ "sepulchre": "sepulcher",
1423
+ "sepulchres": "sepulchers",
1424
+ "serialisation": "serialization",
1425
+ "serialisations": "serializations",
1426
+ "serialise": "serialize",
1427
+ "serialised": "serialized",
1428
+ "serialises": "serializes",
1429
+ "serialising": "serializing",
1430
+ "sermonise": "sermonize",
1431
+ "sermonised": "sermonized",
1432
+ "sermonises": "sermonizes",
1433
+ "sermonising": "sermonizing",
1434
+ "sheikh": "sheik",
1435
+ "shovelled": "shoveled",
1436
+ "shovelling": "shoveling",
1437
+ "shrivelled": "shriveled",
1438
+ "shrivelling": "shriveling",
1439
+ "signalise": "signalize",
1440
+ "signalised": "signalized",
1441
+ "signalises": "signalizes",
1442
+ "signalising": "signalizing",
1443
+ "signalled": "signaled",
1444
+ "signalling": "signaling",
1445
+ "smoulder": "smolder",
1446
+ "smouldered": "smoldered",
1447
+ "smouldering": "smoldering",
1448
+ "smoulders": "smolders",
1449
+ "snivelled": "sniveled",
1450
+ "snivelling": "sniveling",
1451
+ "snorkelled": "snorkeled",
1452
+ "snorkelling": "snorkeling",
1453
+ "snowplough": "snowplow",
1454
+ "snowploughs": "snowplow",
1455
+ "socialisation": "socialization",
1456
+ "socialise": "socialize",
1457
+ "socialised": "socialized",
1458
+ "socialises": "socializes",
1459
+ "socialising": "socializing",
1460
+ "sodomise": "sodomize",
1461
+ "sodomised": "sodomized",
1462
+ "sodomises": "sodomizes",
1463
+ "sodomising": "sodomizing",
1464
+ "solemnise": "solemnize",
1465
+ "solemnised": "solemnized",
1466
+ "solemnises": "solemnizes",
1467
+ "solemnising": "solemnizing",
1468
+ "sombre": "somber",
1469
+ "specialisation": "specialization",
1470
+ "specialisations": "specializations",
1471
+ "specialise": "specialize",
1472
+ "specialised": "specialized",
1473
+ "specialises": "specializes",
1474
+ "specialising": "specializing",
1475
+ "spectre": "specter",
1476
+ "spectres": "specters",
1477
+ "spiralled": "spiraled",
1478
+ "spiralling": "spiraling",
1479
+ "splendour": "splendor",
1480
+ "splendours": "splendors",
1481
+ "squirrelled": "squirreled",
1482
+ "squirrelling": "squirreling",
1483
+ "stabilisation": "stabilization",
1484
+ "stabilise": "stabilize",
1485
+ "stabilised": "stabilized",
1486
+ "stabiliser": "stabilizer",
1487
+ "stabilisers": "stabilizers",
1488
+ "stabilises": "stabilizes",
1489
+ "stabilising": "stabilizing",
1490
+ "standardisation": "standardization",
1491
+ "standardise": "standardize",
1492
+ "standardised": "standardized",
1493
+ "standardises": "standardizes",
1494
+ "standardising": "standardizing",
1495
+ "stencilled": "stenciled",
1496
+ "stencilling": "stenciling",
1497
+ "sterilisation": "sterilization",
1498
+ "sterilisations": "sterilizations",
1499
+ "sterilise": "sterilize",
1500
+ "sterilised": "sterilized",
1501
+ "steriliser": "sterilizer",
1502
+ "sterilisers": "sterilizers",
1503
+ "sterilises": "sterilizes",
1504
+ "sterilising": "sterilizing",
1505
+ "stigmatisation": "stigmatization",
1506
+ "stigmatise": "stigmatize",
1507
+ "stigmatised": "stigmatized",
1508
+ "stigmatises": "stigmatizes",
1509
+ "stigmatising": "stigmatizing",
1510
+ "storey": "story",
1511
+ "storeys": "stories",
1512
+ "subsidisation": "subsidization",
1513
+ "subsidise": "subsidize",
1514
+ "subsidised": "subsidized",
1515
+ "subsidiser": "subsidizer",
1516
+ "subsidisers": "subsidizers",
1517
+ "subsidises": "subsidizes",
1518
+ "subsidising": "subsidizing",
1519
+ "succour": "succor",
1520
+ "succoured": "succored",
1521
+ "succouring": "succoring",
1522
+ "succours": "succors",
1523
+ "sulphate": "sulfate",
1524
+ "sulphates": "sulfates",
1525
+ "sulphide": "sulfide",
1526
+ "sulphides": "sulfides",
1527
+ "sulphur": "sulfur",
1528
+ "sulphurous": "sulfurous",
1529
+ "summarise": "summarize",
1530
+ "summarised": "summarized",
1531
+ "summarises": "summarizes",
1532
+ "summarising": "summarizing",
1533
+ "swivelled": "swiveled",
1534
+ "swivelling": "swiveling",
1535
+ "symbolise": "symbolize",
1536
+ "symbolised": "symbolized",
1537
+ "symbolises": "symbolizes",
1538
+ "symbolising": "symbolizing",
1539
+ "sympathise": "sympathize",
1540
+ "sympathised": "sympathized",
1541
+ "sympathiser": "sympathizer",
1542
+ "sympathisers": "sympathizers",
1543
+ "sympathises": "sympathizes",
1544
+ "sympathising": "sympathizing",
1545
+ "synchronisation": "synchronization",
1546
+ "synchronise": "synchronize",
1547
+ "synchronised": "synchronized",
1548
+ "synchronises": "synchronizes",
1549
+ "synchronising": "synchronizing",
1550
+ "synthesise": "synthesize",
1551
+ "synthesised": "synthesized",
1552
+ "synthesiser": "synthesizer",
1553
+ "synthesisers": "synthesizers",
1554
+ "synthesises": "synthesizes",
1555
+ "synthesising": "synthesizing",
1556
+ "syphon": "siphon",
1557
+ "syphoned": "siphoned",
1558
+ "syphoning": "siphoning",
1559
+ "syphons": "siphons",
1560
+ "systematisation": "systematization",
1561
+ "systematise": "systematize",
1562
+ "systematised": "systematized",
1563
+ "systematises": "systematizes",
1564
+ "systematising": "systematizing",
1565
+ "tantalise": "tantalize",
1566
+ "tantalised": "tantalized",
1567
+ "tantalises": "tantalizes",
1568
+ "tantalising": "tantalizing",
1569
+ "tantalisingly": "tantalizingly",
1570
+ "tasselled": "tasseled",
1571
+ "technicolour": "technicolor",
1572
+ "temporise": "temporize",
1573
+ "temporised": "temporized",
1574
+ "temporises": "temporizes",
1575
+ "temporising": "temporizing",
1576
+ "tenderise": "tenderize",
1577
+ "tenderised": "tenderized",
1578
+ "tenderises": "tenderizes",
1579
+ "tenderising": "tenderizing",
1580
+ "terrorise": "terrorize",
1581
+ "terrorised": "terrorized",
1582
+ "terrorises": "terrorizes",
1583
+ "terrorising": "terrorizing",
1584
+ "theatre": "theater",
1585
+ "theatregoer": "theatergoer",
1586
+ "theatregoers": "theatergoers",
1587
+ "theatres": "theaters",
1588
+ "theorise": "theorize",
1589
+ "theorised": "theorized",
1590
+ "theorises": "theorizes",
1591
+ "theorising": "theorizing",
1592
+ "tonne": "ton",
1593
+ "tonnes": "tons",
1594
+ "towelled": "toweled",
1595
+ "towelling": "toweling",
1596
+ "toxaemia": "toxemia",
1597
+ "tranquillise": "tranquilize",
1598
+ "tranquillised": "tranquilized",
1599
+ "tranquilliser": "tranquilizer",
1600
+ "tranquillisers": "tranquilizers",
1601
+ "tranquillises": "tranquilizes",
1602
+ "tranquillising": "tranquilizing",
1603
+ "tranquillity": "tranquility",
1604
+ "tranquillize": "tranquilize",
1605
+ "tranquillized": "tranquilized",
1606
+ "tranquillizer": "tranquilizer",
1607
+ "tranquillizers": "tranquilizers",
1608
+ "tranquillizes": "tranquilizes",
1609
+ "tranquillizing": "tranquilizing",
1610
+ "tranquilly": "tranquility",
1611
+ "transistorised": "transistorized",
1612
+ "traumatise": "traumatize",
1613
+ "traumatised": "traumatized",
1614
+ "traumatises": "traumatizes",
1615
+ "traumatising": "traumatizing",
1616
+ "travelled": "traveled",
1617
+ "traveller": "traveler",
1618
+ "travellers": "travelers",
1619
+ "travelling": "traveling",
1620
+ "travelog": "travelogue",
1621
+ "travelogs": "travelogues",
1622
+ "trialled": "trialed",
1623
+ "trialling": "trialing",
1624
+ "tricolour": "tricolor",
1625
+ "tricolours": "tricolors",
1626
+ "trivialise": "trivialize",
1627
+ "trivialised": "trivialized",
1628
+ "trivialises": "trivializes",
1629
+ "trivialising": "trivializing",
1630
+ "tumour": "tumor",
1631
+ "tumours": "tumors",
1632
+ "tunnelled": "tunneled",
1633
+ "tunnelling": "tunneling",
1634
+ "tyrannise": "tyrannize",
1635
+ "tyrannised": "tyrannized",
1636
+ "tyrannises": "tyrannizes",
1637
+ "tyrannising": "tyrannizing",
1638
+ "tyre": "tire",
1639
+ "tyres": "tires",
1640
+ "unauthorised": "unauthorized",
1641
+ "uncivilised": "uncivilized",
1642
+ "underutilised": "underutilized",
1643
+ "unequalled": "unequaled",
1644
+ "unfavourable": "unfavorable",
1645
+ "unfavourably": "unfavorably",
1646
+ "unionisation": "unionization",
1647
+ "unionise": "unionize",
1648
+ "unionised": "unionized",
1649
+ "unionises": "unionizes",
1650
+ "unionising": "unionizing",
1651
+ "unorganised": "unorganized",
1652
+ "unravelled": "unraveled",
1653
+ "unravelling": "unraveling",
1654
+ "unrecognisable": "unrecognizable",
1655
+ "unrecognised": "unrecognized",
1656
+ "unrivalled": "unrivaled",
1657
+ "unsavoury": "unsavory",
1658
+ "untrammelled": "untrammeled",
1659
+ "urbanisation": "urbanization",
1660
+ "urbanise": "urbanize",
1661
+ "urbanised": "urbanized",
1662
+ "urbanises": "urbanizes",
1663
+ "urbanising": "urbanizing",
1664
+ "utilisable": "utilizable",
1665
+ "utilisation": "utilization",
1666
+ "utilise": "utilize",
1667
+ "utilised": "utilized",
1668
+ "utilises": "utilizes",
1669
+ "utilising": "utilizing",
1670
+ "valour": "valor",
1671
+ "vandalise": "vandalize",
1672
+ "vandalised": "vandalized",
1673
+ "vandalises": "vandalizes",
1674
+ "vandalising": "vandalizing",
1675
+ "vaporisation": "vaporization",
1676
+ "vaporise": "vaporize",
1677
+ "vaporised": "vaporized",
1678
+ "vaporises": "vaporizes",
1679
+ "vaporising": "vaporizing",
1680
+ "vapour": "vapor",
1681
+ "vapours": "vapors",
1682
+ "verbalise": "verbalize",
1683
+ "verbalised": "verbalized",
1684
+ "verbalises": "verbalizes",
1685
+ "verbalising": "verbalizing",
1686
+ "victimisation": "victimization",
1687
+ "victimise": "victimize",
1688
+ "victimised": "victimized",
1689
+ "victimises": "victimizes",
1690
+ "victimising": "victimizing",
1691
+ "videodisc": "videodisk",
1692
+ "videodiscs": "videodisks",
1693
+ "vigour": "vigor",
1694
+ "visualisation": "visualization",
1695
+ "visualisations": "visualizations",
1696
+ "visualise": "visualize",
1697
+ "visualised": "visualized",
1698
+ "visualises": "visualizes",
1699
+ "visualising": "visualizing",
1700
+ "vocalisation": "vocalization",
1701
+ "vocalisations": "vocalizations",
1702
+ "vocalise": "vocalize",
1703
+ "vocalised": "vocalized",
1704
+ "vocalises": "vocalizes",
1705
+ "vocalising": "vocalizing",
1706
+ "vulcanised": "vulcanized",
1707
+ "vulgarisation": "vulgarization",
1708
+ "vulgarise": "vulgarize",
1709
+ "vulgarised": "vulgarized",
1710
+ "vulgarises": "vulgarizes",
1711
+ "vulgarising": "vulgarizing",
1712
+ "waggon": "wagon",
1713
+ "waggons": "wagons",
1714
+ "watercolour": "watercolor",
1715
+ "watercolours": "watercolors",
1716
+ "weaselled": "weaseled",
1717
+ "weaselling": "weaseling",
1718
+ "westernisation": "westernization",
1719
+ "westernise": "westernize",
1720
+ "westernised": "westernized",
1721
+ "westernises": "westernizes",
1722
+ "westernising": "westernizing",
1723
+ "womanise": "womanize",
1724
+ "womanised": "womanized",
1725
+ "womaniser": "womanizer",
1726
+ "womanisers": "womanizers",
1727
+ "womanises": "womanizes",
1728
+ "womanising": "womanizing",
1729
+ "woollen": "woolen",
1730
+ "woollens": "woolens",
1731
+ "woollies": "woolies",
1732
+ "woolly": "wooly",
1733
+ "worshipped": "worshiped",
1734
+ "worshipper": "worshiper",
1735
+ "worshipping": "worshiping",
1736
+ "yodelled": "yodeled",
1737
+ "yodelling": "yodeling",
1738
+ "yoghourt": "yogurt",
1739
+ "yoghourts": "yogurts",
1740
+ "yoghurt": "yogurt",
1741
+ "yoghurts": "yogurts"
1742
+ }
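The table above is the British-to-American spelling mapping that ships with the Whisper tokenizer's English text normalizer; it is typically applied to both predictions and references before scoring, so spelling variants ("colour" vs. "color") are not counted as transcription errors. Below is a minimal sketch of applying such a word-level mapping, assuming the file is saved as normalizer.json next to this commit; the helper is illustrative, not the library's own implementation, and it ignores punctuation and the few multi-word keys.

import json

# Load the spelling table committed above (British -> American).
with open("normalizer.json") as f:
    spelling_map = json.load(f)

def normalize_spelling(text: str) -> str:
    # Replace each whitespace-separated word if it appears in the table.
    return " ".join(spelling_map.get(word, word) for word in text.split())

print(normalize_spelling("the colour of the theatre"))  # -> the color of the theater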
preprocessor_config.json ADDED
@@ -0,0 +1,14 @@
1
+ {
2
+ "chunk_length": 30,
3
+ "feature_extractor_type": "WhisperFeatureExtractor",
4
+ "feature_size": 128,
5
+ "hop_length": 160,
6
+ "n_fft": 400,
7
+ "n_samples": 480000,
8
+ "nb_max_frames": 3000,
9
+ "padding_side": "right",
10
+ "padding_value": 0.0,
11
+ "processor_class": "WhisperProcessor",
12
+ "return_attention_mask": false,
13
+ "sampling_rate": 16000
14
+ }
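This feature-extractor config describes 128-bin log-mel inputs over 30-second windows: at a 16 kHz sampling rate, 30 s is 480,000 samples (n_samples), and with a hop length of 160 that gives 480,000 / 160 = 3,000 frames (nb_max_frames). A short sketch of instantiating the named class with these values, assuming transformers is installed; the zero-filled array is a stand-in for real audio.

import numpy as np
from transformers import WhisperFeatureExtractor

# Values taken directly from preprocessor_config.json above.
feature_extractor = WhisperFeatureExtractor(
    feature_size=128,
    sampling_rate=16000,
    hop_length=160,
    n_fft=400,
    chunk_length=30,
    padding_value=0.0,
)

audio = np.zeros(16000 * 5, dtype=np.float32)  # stand-in: 5 s of silence
inputs = feature_extractor(audio, sampling_rate=16000, return_tensors="np")
print(inputs.input_features.shape)  # (1, 128, 3000): padded to the full 30 s window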
run.sh ADDED
@@ -0,0 +1,34 @@
1
+ python run_speech_recognition_seq2seq.py \
2
+ --model_name_or_path="distil-whisper/distil-large-v3" \
3
+ --dataset_name="mozilla-foundation/common_voice_16_1" \
4
+ --dataset_config_name="hi" \
5
+ --language="hindi" \
6
+ --train_split_name="train+validation" \
7
+ --eval_split_name="test" \
8
+ --max_steps="5000" \
9
+ --output_dir="./" \
10
+ --per_device_train_batch_size="32" \
11
+ --per_device_eval_batch_size="32" \
12
+ --logging_steps="25" \
13
+ --learning_rate="1e-4" \
14
+ --warmup_steps="500" \
15
+ --evaluation_strategy="steps" \
16
+ --eval_steps="1000" \
17
+ --save_strategy="steps" \
18
+ --save_steps="1000" \
19
+ --generation_max_length="225" \
20
+ --preprocessing_num_workers="1" \
21
+ --dataloader_num_workers="4" \
22
+ --length_column_name="input_length" \
23
+ --max_duration_in_seconds="30" \
24
+ --text_column_name="sentence" \
25
+ --freeze_feature_encoder="False" \
26
+ --freeze_encoder \
27
+ --gradient_checkpointing \
28
+ --fp16 \
29
+ --overwrite_output_dir \
30
+ --do_train \
31
+ --do_eval \
32
+ --predict_with_generate \
33
+ --use_auth_token \
34
+ --push_to_hub
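With --freeze_encoder set, only distil-large-v3's 2-layer decoder is updated during this Hindi fine-tuning run; note also that --use_auth_token is deprecated in favor of --token, which the script below warns about at startup. Once the run pushes a checkpoint, inference could look like the following sketch (the model id is an assumption inferred from this run's directory name; substitute the actual Hub repo):

```python
from transformers import pipeline

# Hypothetical repo id; replace with the repository this run pushes to.
asr = pipeline(
    "automatic-speech-recognition",
    model="sanchit-gandhi/distil-large-v3-hi-ft-frozen-encoder",
)

# Force Hindi transcription, matching the fine-tuning prefix tokens.
result = asr("sample.wav", generate_kwargs={"language": "hindi", "task": "transcribe"})
print(result["text"])
```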
run_speech_recognition_seq2seq.py ADDED
@@ -0,0 +1,627 @@
+ #!/usr/bin/env python
+ # coding=utf-8
+ # Copyright 2021 The HuggingFace Team. All rights reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ """
+ Fine-tuning the library models for sequence to sequence speech recognition.
+ """
+ # You can also adapt this script on your own sequence to sequence speech
+ # recognition task. Pointers for this are left as comments.
+
+ import logging
+ import os
+ import sys
+ import warnings
+ from dataclasses import dataclass, field
+ from typing import Any, Dict, List, Optional, Union
+
+ import datasets
+ import evaluate
+ import torch
+ from datasets import DatasetDict, load_dataset
+
+ import transformers
+ from transformers import (
+     AutoConfig,
+     AutoFeatureExtractor,
+     AutoModelForSpeechSeq2Seq,
+     AutoProcessor,
+     AutoTokenizer,
+     HfArgumentParser,
+     Seq2SeqTrainer,
+     Seq2SeqTrainingArguments,
+     set_seed,
+ )
+ from transformers.trainer_utils import get_last_checkpoint, is_main_process
+ from transformers.utils import check_min_version, send_example_telemetry
+ from transformers.utils.versions import require_version
+
+
+ # Will error if the minimal version of Transformers is not installed. Remove at your own risk.
+ check_min_version("4.40.0.dev0")
+
+ require_version("datasets>=1.18.0", "To fix: pip install -r examples/pytorch/speech-recognition/requirements.txt")
+
+ logger = logging.getLogger(__name__)
+
+
+ @dataclass
+ class ModelArguments:
+     """
+     Arguments pertaining to which model/config/tokenizer we are going to fine-tune from.
+     """
+
+     model_name_or_path: str = field(
+         metadata={"help": "Path to pretrained model or model identifier from huggingface.co/models"}
+     )
+     config_name: Optional[str] = field(
+         default=None, metadata={"help": "Pretrained config name or path if not the same as model_name"}
+     )
+     tokenizer_name: Optional[str] = field(
+         default=None, metadata={"help": "Pretrained tokenizer name or path if not the same as model_name"}
+     )
+     feature_extractor_name: Optional[str] = field(
+         default=None, metadata={"help": "Feature extractor name or path if not the same as model_name"}
+     )
+     cache_dir: Optional[str] = field(
+         default=None,
+         metadata={"help": "Where to store the pretrained models downloaded from huggingface.co"},
+     )
+     use_fast_tokenizer: bool = field(
+         default=True,
+         metadata={"help": "Whether to use one of the fast tokenizers (backed by the tokenizers library) or not."},
+     )
+     model_revision: str = field(
+         default="main",
+         metadata={"help": "The specific model version to use (can be a branch name, tag name or commit id)."},
+     )
+     token: str = field(
+         default=None,
+         metadata={
+             "help": (
+                 "The token to use as HTTP bearer authorization for remote files. If not specified, will use the token "
+                 "generated when running `huggingface-cli login` (stored in `~/.huggingface`)."
+             )
+         },
+     )
+     use_auth_token: bool = field(
+         default=None,
+         metadata={
+             "help": "The `use_auth_token` argument is deprecated and will be removed in v4.34. Please use `token` instead."
+         },
+     )
+     trust_remote_code: bool = field(
+         default=False,
+         metadata={
+             "help": (
+                 "Whether or not to allow for custom models defined on the Hub in their own modeling files. This option "
+                 "should only be set to `True` for repositories you trust and in which you have read the code, as it will "
+                 "execute code present on the Hub on your local machine."
+             )
+         },
+     )
+     freeze_feature_encoder: bool = field(
+         default=True, metadata={"help": "Whether to freeze the feature encoder layers of the model."}
+     )
+     freeze_encoder: bool = field(
+         default=False, metadata={"help": "Whether to freeze the entire encoder of the seq2seq model."}
+     )
+     forced_decoder_ids: List[List[int]] = field(
+         default=None,
+         metadata={
+             "help": (
+                 "A list of pairs of integers which indicates a mapping from generation indices to token indices "
+                 "that will be forced before sampling. For example, [[0, 123]] means the first generated token "
+                 "will always be a token of index 123."
+             )
+         },
+     )
+     suppress_tokens: List[int] = field(
+         default=None, metadata={"help": "A list of tokens that will be suppressed at generation."}
+     )
+     apply_spec_augment: bool = field(
+         default=False,
+         metadata={
+             "help": "Whether to apply *SpecAugment* data augmentation to the input features. This is currently only relevant for Wav2Vec2, HuBERT, WavLM and Whisper models."
+         },
+     )
+
+
+ @dataclass
+ class DataTrainingArguments:
+     """
+     Arguments pertaining to what data we are going to input our model for training and eval.
+     """
+
+     dataset_name: str = field(
+         default=None, metadata={"help": "The name of the dataset to use (via the datasets library)."}
+     )
+     dataset_config_name: Optional[str] = field(
+         default=None, metadata={"help": "The configuration name of the dataset to use (via the datasets library)."}
+     )
+     overwrite_cache: bool = field(
+         default=False, metadata={"help": "Overwrite the cached training and evaluation sets"}
+     )
+     preprocessing_num_workers: Optional[int] = field(
+         default=None,
+         metadata={"help": "The number of processes to use for the preprocessing."},
+     )
+     max_train_samples: Optional[int] = field(
+         default=None,
+         metadata={
+             "help": (
+                 "For debugging purposes or quicker training, truncate the number of training examples to this "
+                 "value if set."
+             )
+         },
+     )
+     max_eval_samples: Optional[int] = field(
+         default=None,
+         metadata={
+             "help": (
+                 "For debugging purposes or quicker training, truncate the number of evaluation examples to this "
+                 "value if set."
+             )
+         },
+     )
+     audio_column_name: str = field(
+         default="audio",
+         metadata={"help": "The name of the dataset column containing the audio data. Defaults to 'audio'"},
+     )
+     text_column_name: str = field(
+         default="text",
+         metadata={"help": "The name of the dataset column containing the text data. Defaults to 'text'"},
+     )
+     max_duration_in_seconds: float = field(
+         default=20.0,
+         metadata={
+             "help": (
+                 "Truncate audio files that are longer than `max_duration_in_seconds` seconds to"
+                 " `max_duration_in_seconds`"
+             )
+         },
+     )
+     min_duration_in_seconds: float = field(
+         default=0.0, metadata={"help": "Filter audio files that are shorter than `min_duration_in_seconds` seconds"}
+     )
+     preprocessing_only: bool = field(
+         default=False,
+         metadata={
+             "help": (
+                 "Whether to only do data preprocessing and skip training. This is especially useful when data"
+                 " preprocessing errors out in distributed training due to timeout. In this case, one should run the"
+                 " preprocessing in a non-distributed setup with `preprocessing_only=True` so that the cached datasets"
+                 " can consequently be loaded in distributed training"
+             )
+         },
+     )
+     train_split_name: str = field(
+         default="train",
+         metadata={
+             "help": "The name of the training data set split to use (via the datasets library). Defaults to 'train'"
+         },
+     )
+     eval_split_name: str = field(
+         default="test",
+         metadata={
+             "help": "The name of the evaluation data set split to use (via the datasets library). Defaults to 'test'"
+         },
+     )
+     do_lower_case: bool = field(
+         default=True,
+         metadata={"help": "Whether the target text should be lower cased."},
+     )
+     language: str = field(
+         default=None,
+         metadata={
+             "help": (
+                 "Language for multilingual fine-tuning. This argument should be set for multilingual fine-tuning "
+                 "only. For English speech recognition, it should be set to `None`."
+             )
+         },
+     )
+     task: str = field(
+         default="transcribe",
+         metadata={"help": "Task, either `transcribe` for speech recognition or `translate` for speech translation."},
+     )
+
+
+ @dataclass
+ class DataCollatorSpeechSeq2SeqWithPadding:
+     """
+     Data collator that will dynamically pad the inputs received.
+     Args:
+         processor ([`WhisperProcessor`])
+             The processor used for processing the data.
+         decoder_start_token_id (`int`)
+             The begin-of-sentence token id of the decoder.
+         forward_attention_mask (`bool`)
+             Whether to return attention_mask.
+     """
+
+     processor: Any
+     decoder_start_token_id: int
+     forward_attention_mask: bool
+
+     def __call__(self, features: List[Dict[str, Union[List[int], torch.Tensor]]]) -> Dict[str, torch.Tensor]:
+         # split inputs and labels since they have to be of different lengths and need
+         # different padding methods
+         model_input_name = self.processor.model_input_names[0]
+         input_features = [{model_input_name: feature[model_input_name]} for feature in features]
+         label_features = [{"input_ids": feature["labels"]} for feature in features]
+
+         batch = self.processor.feature_extractor.pad(input_features, return_tensors="pt")
+
+         if self.forward_attention_mask:
+             batch["attention_mask"] = torch.LongTensor([feature["attention_mask"] for feature in features])
+
+         labels_batch = self.processor.tokenizer.pad(label_features, return_tensors="pt")
+
+         # replace padding with -100 to ignore loss correctly
+         labels = labels_batch["input_ids"].masked_fill(labels_batch.attention_mask.ne(1), -100)
+
+         # if bos token is appended in previous tokenization step,
+         # cut bos token here as it's appended later anyway
+         if (labels[:, 0] == self.decoder_start_token_id).all().cpu().item():
+             labels = labels[:, 1:]
+
+         batch["labels"] = labels
+
+         return batch
+
+
+ def main():
+     # 1. Parse input arguments
+     # See all possible arguments in src/transformers/training_args.py
+     # or by passing the --help flag to this script.
+     # We now keep distinct sets of args, for a cleaner separation of concerns.
+     parser = HfArgumentParser((ModelArguments, DataTrainingArguments, Seq2SeqTrainingArguments))
+
+     if len(sys.argv) == 2 and sys.argv[1].endswith(".json"):
+         # If we pass only one argument to the script and it's the path to a json file,
+         # let's parse it to get our arguments.
+         model_args, data_args, training_args = parser.parse_json_file(json_file=os.path.abspath(sys.argv[1]))
+     else:
+         model_args, data_args, training_args = parser.parse_args_into_dataclasses()
+
+     if model_args.use_auth_token is not None:
+         warnings.warn(
+             "The `use_auth_token` argument is deprecated and will be removed in v4.34. Please use `token` instead.",
+             FutureWarning,
+         )
+         if model_args.token is not None:
+             raise ValueError("`token` and `use_auth_token` are both specified. Please set only the argument `token`.")
+         model_args.token = model_args.use_auth_token
+
+     # Sending telemetry. Tracking the example usage helps us better allocate resources to maintain them. The
+     # information sent is the one passed as arguments along with your Python/PyTorch versions.
+     send_example_telemetry("run_speech_recognition_seq2seq", model_args, data_args)
+
+     # 2. Setup logging
+     logging.basicConfig(
+         format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
+         datefmt="%m/%d/%Y %H:%M:%S",
+         handlers=[logging.StreamHandler(sys.stdout)],
+     )
+     log_level = training_args.get_process_log_level()
+     logger.setLevel(log_level)
+     datasets.utils.logging.set_verbosity(log_level)
+     transformers.utils.logging.set_verbosity(log_level)
+     transformers.utils.logging.enable_default_handler()
+     transformers.utils.logging.enable_explicit_format()
+
+     logger.setLevel(logging.INFO if is_main_process(training_args.local_rank) else logging.WARN)
+
+     # Log on each process the small summary:
+     logger.warning(
+         f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}, "
+         f"distributed training: {training_args.parallel_mode.value == 'distributed'}, 16-bits training: {training_args.fp16}"
+     )
+     logger.info(f"Training/evaluation parameters {training_args}")
+
+     # Set the verbosity to info of the Transformers logger (on main process only):
+     if is_main_process(training_args.local_rank):
+         transformers.utils.logging.set_verbosity_info()
+     logger.info("Training/evaluation parameters %s", training_args)
+
+     # 3. Detecting last checkpoint and eventually continue from last checkpoint
+     last_checkpoint = None
+     if os.path.isdir(training_args.output_dir) and training_args.do_train and not training_args.overwrite_output_dir:
+         last_checkpoint = get_last_checkpoint(training_args.output_dir)
+         if last_checkpoint is None and len(os.listdir(training_args.output_dir)) > 0:
+             raise ValueError(
+                 f"Output directory ({training_args.output_dir}) already exists and is not empty. "
+                 "Use --overwrite_output_dir to overcome."
+             )
+         elif last_checkpoint is not None and training_args.resume_from_checkpoint is None:
+             logger.info(
+                 f"Checkpoint detected, resuming training at {last_checkpoint}. To avoid this behavior, change "
+                 "the `--output_dir` or add `--overwrite_output_dir` to train from scratch."
+             )
+
+     # Set seed before initializing model.
+     set_seed(training_args.seed)
+
+     # 4. Load dataset
+     raw_datasets = DatasetDict()
+
+     if training_args.do_train:
+         raw_datasets["train"] = load_dataset(
+             data_args.dataset_name,
+             data_args.dataset_config_name,
+             split=data_args.train_split_name,
+             cache_dir=model_args.cache_dir,
+             token=model_args.token,
+         )
+
+     if training_args.do_eval:
+         raw_datasets["eval"] = load_dataset(
+             data_args.dataset_name,
+             data_args.dataset_config_name,
+             split=data_args.eval_split_name,
+             cache_dir=model_args.cache_dir,
+             token=model_args.token,
+         )
+
+     if data_args.audio_column_name not in next(iter(raw_datasets.values())).column_names:
+         raise ValueError(
+             f"--audio_column_name '{data_args.audio_column_name}' not found in dataset '{data_args.dataset_name}'. "
+             "Make sure to set `--audio_column_name` to the correct audio column - one of "
+             f"{', '.join(next(iter(raw_datasets.values())).column_names)}."
+         )
+
+     if data_args.text_column_name not in next(iter(raw_datasets.values())).column_names:
+         raise ValueError(
+             f"--text_column_name {data_args.text_column_name} not found in dataset '{data_args.dataset_name}'. "
+             "Make sure to set `--text_column_name` to the correct text column - one of "
+             f"{', '.join(next(iter(raw_datasets.values())).column_names)}."
+         )
+
+     # 5. Load pretrained model, tokenizer, and feature extractor
+     #
+     # Distributed training:
+     # The .from_pretrained methods guarantee that only one local process can concurrently download model & vocab.
+     config = AutoConfig.from_pretrained(
+         model_args.config_name if model_args.config_name else model_args.model_name_or_path,
+         cache_dir=model_args.cache_dir,
+         revision=model_args.model_revision,
+         token=model_args.token,
+         trust_remote_code=model_args.trust_remote_code,
+     )
+
+     config.update({"forced_decoder_ids": model_args.forced_decoder_ids, "suppress_tokens": model_args.suppress_tokens})
+
+     # SpecAugment for whisper models
+     if getattr(config, "model_type", None) == "whisper":
+         config.update({"apply_spec_augment": model_args.apply_spec_augment})
+
+     feature_extractor = AutoFeatureExtractor.from_pretrained(
+         model_args.feature_extractor_name if model_args.feature_extractor_name else model_args.model_name_or_path,
+         cache_dir=model_args.cache_dir,
+         revision=model_args.model_revision,
+         token=model_args.token,
+         trust_remote_code=model_args.trust_remote_code,
+     )
+     tokenizer = AutoTokenizer.from_pretrained(
+         model_args.tokenizer_name if model_args.tokenizer_name else model_args.model_name_or_path,
+         cache_dir=model_args.cache_dir,
+         use_fast=model_args.use_fast_tokenizer,
+         revision=model_args.model_revision,
+         token=model_args.token,
+         trust_remote_code=model_args.trust_remote_code,
+     )
+     model = AutoModelForSpeechSeq2Seq.from_pretrained(
+         model_args.model_name_or_path,
+         config=config,
+         cache_dir=model_args.cache_dir,
+         revision=model_args.model_revision,
+         token=model_args.token,
+         trust_remote_code=model_args.trust_remote_code,
+     )
+
+     if model.config.decoder_start_token_id is None:
+         raise ValueError("Make sure that `config.decoder_start_token_id` is correctly defined")
+
+     if model_args.freeze_feature_encoder:
+         model.freeze_feature_encoder()
+
+     if model_args.freeze_encoder:
+         model.freeze_encoder()
+         model.model.encoder.gradient_checkpointing = False
+
+     if data_args.language is not None:
+         # We only need to set the task id when the language is specified (i.e. in a multilingual setting)
+         tokenizer.set_prefix_tokens(language=data_args.language, task=data_args.task)
+         model.generation_config.language = data_args.language
+         model.generation_config.task = data_args.task
+
+     # 6. Resample speech dataset if necessary
+     dataset_sampling_rate = next(iter(raw_datasets.values())).features[data_args.audio_column_name].sampling_rate
+     if dataset_sampling_rate != feature_extractor.sampling_rate:
+         raw_datasets = raw_datasets.cast_column(
+             data_args.audio_column_name, datasets.features.Audio(sampling_rate=feature_extractor.sampling_rate)
+         )
+
+     # 7. Preprocessing the datasets.
+     # We need to read the audio files as arrays and tokenize the targets.
+     max_input_length = data_args.max_duration_in_seconds * feature_extractor.sampling_rate
+     min_input_length = data_args.min_duration_in_seconds * feature_extractor.sampling_rate
+     audio_column_name = data_args.audio_column_name
+     num_workers = data_args.preprocessing_num_workers
+     text_column_name = data_args.text_column_name
+     model_input_name = feature_extractor.model_input_names[0]
+     do_lower_case = data_args.do_lower_case
+     # if SpecAugment is used for whisper models, return attention_mask to guide the mask along time axis
+     forward_attention_mask = (
+         getattr(config, "model_type", None) == "whisper"
+         and getattr(config, "apply_spec_augment", False)
+         and getattr(config, "mask_time_prob", 0) > 0
+     )
+
+     if data_args.max_train_samples is not None:
+         raw_datasets["train"] = raw_datasets["train"].select(range(data_args.max_train_samples))
+
+     if data_args.max_eval_samples is not None:
+         raw_datasets["eval"] = raw_datasets["eval"].select(range(data_args.max_eval_samples))
+
+     def prepare_dataset(batch):
+         # process audio
+         sample = batch[audio_column_name]
+         inputs = feature_extractor(
+             sample["array"], sampling_rate=sample["sampling_rate"], return_attention_mask=forward_attention_mask
+         )
+         # process audio length
+         batch[model_input_name] = inputs.get(model_input_name)[0]
+         batch["input_length"] = len(sample["array"])
+         if forward_attention_mask:
+             batch["attention_mask"] = inputs.get("attention_mask")[0]
+
+         # process targets
+         input_str = batch[text_column_name].lower() if do_lower_case else batch[text_column_name]
+         batch["labels"] = tokenizer(input_str).input_ids
+         return batch
+
+     with training_args.main_process_first(desc="dataset map pre-processing"):
+         vectorized_datasets = raw_datasets.map(
+             prepare_dataset,
+             remove_columns=next(iter(raw_datasets.values())).column_names,
+             num_proc=data_args.preprocessing_num_workers,
+             desc="preprocess train dataset",
+         )
+
+     # filter data that is shorter than min_input_length or longer than
+     # max_input_length
+     def is_audio_in_length_range(length):
+         return length > min_input_length and length < max_input_length
+
+     vectorized_datasets = vectorized_datasets.filter(
+         is_audio_in_length_range,
+         num_proc=num_workers,
+         input_columns=["input_length"],
+     )
+
+     # for large datasets it is advised to run the preprocessing on a
+     # single machine first with `args.preprocessing_only` since there will most likely
+     # be a timeout when running the script in distributed mode.
+     # In a second step `args.preprocessing_only` can then be set to `False` to load the
+     # cached dataset
+     if data_args.preprocessing_only:
+         cache = {k: v.cache_files for k, v in vectorized_datasets.items()}
+         logger.info(f"Data preprocessing finished. Files cached at {cache}.")
+         return
+
+     # 8. Load Metric
+     metric = evaluate.load("wer", cache_dir=model_args.cache_dir)
+
+     def compute_metrics(pred):
+         pred_ids = pred.predictions
+
+         pred.label_ids[pred.label_ids == -100] = tokenizer.pad_token_id
+
+         pred_str = tokenizer.batch_decode(pred_ids, skip_special_tokens=True)
+         # we do not want to group tokens when computing the metrics
+         label_str = tokenizer.batch_decode(pred.label_ids, skip_special_tokens=True)
+
+         wer = metric.compute(predictions=pred_str, references=label_str)
+
+         return {"wer": wer}
+
+     # 9. Create a single speech processor
+     # make sure all processes wait until data is saved
+     with training_args.main_process_first():
+         # only the main process saves them
+         if is_main_process(training_args.local_rank):
+             # save feature extractor, tokenizer and config
+             feature_extractor.save_pretrained(training_args.output_dir)
+             tokenizer.save_pretrained(training_args.output_dir)
+             config.save_pretrained(training_args.output_dir)
+
+     processor = AutoProcessor.from_pretrained(training_args.output_dir)
+
+     # 10. Define data collator
+     data_collator = DataCollatorSpeechSeq2SeqWithPadding(
+         processor=processor,
+         decoder_start_token_id=model.config.decoder_start_token_id,
+         forward_attention_mask=forward_attention_mask,
+     )
+
+     # 11. Initialize Trainer
+     trainer = Seq2SeqTrainer(
+         model=model,
+         args=training_args,
+         train_dataset=vectorized_datasets["train"] if training_args.do_train else None,
+         eval_dataset=vectorized_datasets["eval"] if training_args.do_eval else None,
+         tokenizer=feature_extractor,
+         data_collator=data_collator,
+         compute_metrics=compute_metrics if training_args.predict_with_generate else None,
+     )
+
+     # 12. Training
+     if training_args.do_train:
+         checkpoint = None
+         if training_args.resume_from_checkpoint is not None:
+             checkpoint = training_args.resume_from_checkpoint
+         elif last_checkpoint is not None:
+             checkpoint = last_checkpoint
+         train_result = trainer.train(resume_from_checkpoint=checkpoint)
+         trainer.save_model()  # Saves the feature extractor too for easy upload
+
+         metrics = train_result.metrics
+         max_train_samples = (
+             data_args.max_train_samples
+             if data_args.max_train_samples is not None
+             else len(vectorized_datasets["train"])
+         )
+         metrics["train_samples"] = min(max_train_samples, len(vectorized_datasets["train"]))
+         trainer.log_metrics("train", metrics)
+         trainer.save_metrics("train", metrics)
+         trainer.save_state()
+
+     # 13. Evaluation
+     results = {}
+     if training_args.do_eval:
+         logger.info("*** Evaluate ***")
+         metrics = trainer.evaluate(
+             metric_key_prefix="eval",
+             max_length=training_args.generation_max_length,
+             num_beams=training_args.generation_num_beams,
+         )
+         max_eval_samples = (
+             data_args.max_eval_samples if data_args.max_eval_samples is not None else len(vectorized_datasets["eval"])
+         )
+         metrics["eval_samples"] = min(max_eval_samples, len(vectorized_datasets["eval"]))
+
+         trainer.log_metrics("eval", metrics)
+         trainer.save_metrics("eval", metrics)
+
+     # 14. Write Training Stats
+     kwargs = {"finetuned_from": model_args.model_name_or_path, "tasks": "automatic-speech-recognition"}
+     if data_args.dataset_name is not None:
+         kwargs["dataset_tags"] = data_args.dataset_name
+         if data_args.dataset_config_name is not None:
+             kwargs["dataset_args"] = data_args.dataset_config_name
+             kwargs["dataset"] = f"{data_args.dataset_name} {data_args.dataset_config_name}"
+         else:
+             kwargs["dataset"] = data_args.dataset_name
+
+     if training_args.push_to_hub:
+         trainer.push_to_hub(**kwargs)
+     else:
+         trainer.create_model_card(**kwargs)
+
+     return results
+
+
+ if __name__ == "__main__":
+     main()
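The script's compute_metrics reports WER as the raw fraction returned by the evaluate library (0.25 means one word error per four reference words; multiply by 100 for a percentage). A quick standalone check of the same metric (the Hindi transliterations are made-up illustration strings):

```python
import evaluate

wer_metric = evaluate.load("wer")

predictions = ["namaste duniya", "yah ek parikshan hai"]
references = ["namaste duniya", "yah ek pariksha hai"]

# One substitution over six reference words -> WER of ~0.167.
print(wer_metric.compute(predictions=predictions, references=references))
```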
runs/Mar27_13-10-05_hf-dgx-01/events.out.tfevents.1711541419.hf-dgx-01.1400507.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a109c2e4a5125fb6066198bb5af7713187de92d0b970baf44f9021647264eb86
+ size 13808
runs/Mar27_14-10-22_hf-dgx-01/events.out.tfevents.1711545032.hf-dgx-01.1482814.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b67a383403ca8a4a67fbfb5aa8c6a7fafe4a5f18111fed3361a5c8d190e2746d
+ size 14126
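Both TensorBoard event files are stored through Git LFS, so the diff shows only the three-line pointer (spec version, SHA-256 of the blob, size in bytes) rather than the binary itself. A minimal sketch of reading such a pointer into its fields (the path is taken from this commit):

```python
# Parse a Git LFS pointer file into its key/value fields.
def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

pointer = parse_lfs_pointer(
    "runs/Mar27_13-10-05_hf-dgx-01/events.out.tfevents.1711541419.hf-dgx-01.1400507.0"
)
print(pointer["oid"], pointer["size"])  # sha256:a109c2e4... 13808
```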
special_tokens_map.json ADDED
@@ -0,0 +1,139 @@
+ {
+ "additional_special_tokens": [
+ "<|startoftranscript|>",
+ "<|en|>",
+ "<|zh|>",
+ "<|de|>",
+ "<|es|>",
+ "<|ru|>",
+ "<|ko|>",
+ "<|fr|>",
+ "<|ja|>",
+ "<|pt|>",
+ "<|tr|>",
+ "<|pl|>",
+ "<|ca|>",
+ "<|nl|>",
+ "<|ar|>",
+ "<|sv|>",
+ "<|it|>",
+ "<|id|>",
+ "<|hi|>",
+ "<|fi|>",
+ "<|vi|>",
+ "<|he|>",
+ "<|uk|>",
+ "<|el|>",
+ "<|ms|>",
+ "<|cs|>",
+ "<|ro|>",
+ "<|da|>",
+ "<|hu|>",
+ "<|ta|>",
+ "<|no|>",
+ "<|th|>",
+ "<|ur|>",
+ "<|hr|>",
+ "<|bg|>",
+ "<|lt|>",
+ "<|la|>",
+ "<|mi|>",
+ "<|ml|>",
+ "<|cy|>",
+ "<|sk|>",
+ "<|te|>",
+ "<|fa|>",
+ "<|lv|>",
+ "<|bn|>",
+ "<|sr|>",
+ "<|az|>",
+ "<|sl|>",
+ "<|kn|>",
+ "<|et|>",
+ "<|mk|>",
+ "<|br|>",
+ "<|eu|>",
+ "<|is|>",
+ "<|hy|>",
+ "<|ne|>",
+ "<|mn|>",
+ "<|bs|>",
+ "<|kk|>",
+ "<|sq|>",
+ "<|sw|>",
+ "<|gl|>",
+ "<|mr|>",
+ "<|pa|>",
+ "<|si|>",
+ "<|km|>",
+ "<|sn|>",
+ "<|yo|>",
+ "<|so|>",
+ "<|af|>",
+ "<|oc|>",
+ "<|ka|>",
+ "<|be|>",
+ "<|tg|>",
+ "<|sd|>",
+ "<|gu|>",
+ "<|am|>",
+ "<|yi|>",
+ "<|lo|>",
+ "<|uz|>",
+ "<|fo|>",
+ "<|ht|>",
+ "<|ps|>",
+ "<|tk|>",
+ "<|nn|>",
+ "<|mt|>",
+ "<|sa|>",
+ "<|lb|>",
+ "<|my|>",
+ "<|bo|>",
+ "<|tl|>",
+ "<|mg|>",
+ "<|as|>",
+ "<|tt|>",
+ "<|haw|>",
+ "<|ln|>",
+ "<|ha|>",
+ "<|ba|>",
+ "<|jw|>",
+ "<|su|>",
+ "<|yue|>",
+ "<|translate|>",
+ "<|transcribe|>",
+ "<|startoflm|>",
+ "<|startofprev|>",
+ "<|nospeech|>",
+ "<|notimestamps|>"
+ ],
+ "bos_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
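The language codes above double as Whisper's decoder prompt tokens. During fine-tuning, the training script calls tokenizer.set_prefix_tokens(language="hindi", task="transcribe"), so every label sequence starts with the matching special tokens. A small sketch of what that prefix looks like (the exact printed list is indicative):

```python
from transformers import WhisperTokenizer

tokenizer = WhisperTokenizer.from_pretrained("distil-whisper/distil-large-v3")
tokenizer.set_prefix_tokens(language="hindi", task="transcribe")

# Tokenizing an empty string shows the forced prefix plus end-of-text.
ids = tokenizer("").input_ids
print(tokenizer.convert_ids_to_tokens(ids))
# e.g. ['<|startoftranscript|>', '<|hi|>', '<|transcribe|>', '<|notimestamps|>', '<|endoftext|>']
```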
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
 
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f27ea4e0d69ad73da18d7df2aac11132046f87eda8cb3c5ff28639d1fba157c7
+ size 5048
vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
wandb/debug-cli.sanchit.log ADDED
File without changes
wandb/debug-internal.log ADDED
The diff for this file is too large to render. See raw diff
 
wandb/debug.log ADDED
@@ -0,0 +1,28 @@
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_setup.py:_flush():76] Current SDK version is 0.16.2
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_setup.py:_flush():76] Configure stats pid to 1482814
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_setup.py:_flush():76] Loading settings from /home/sanchit/.config/wandb/settings
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_setup.py:_flush():76] Loading settings from /home/sanchit/distil-large-v3-hi-ft-frozen-encoder/wandb/settings
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program_relpath': 'run_speech_recognition_seq2seq.py', 'program_abspath': '/home/sanchit/distil-large-v3-hi-ft-frozen-encoder/run_speech_recognition_seq2seq.py', 'program': 'run_speech_recognition_seq2seq.py'}
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_init.py:_log_setup():526] Logging user logs to /home/sanchit/distil-large-v3-hi-ft-frozen-encoder/wandb/run-20240327_141033-golaq7b9/logs/debug.log
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_init.py:_log_setup():527] Logging internal logs to /home/sanchit/distil-large-v3-hi-ft-frozen-encoder/wandb/run-20240327_141033-golaq7b9/logs/debug-internal.log
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_init.py:init():566] calling init triggers
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_init.py:init():573] wandb.init called with sweep_config: {}
+ config: {}
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_init.py:init():616] starting backend
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_init.py:init():620] setting up manager
+ 2024-03-27 14:10:33,212 INFO MainThread:1482814 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
+ 2024-03-27 14:10:33,213 INFO MainThread:1482814 [wandb_init.py:init():628] backend started and connected
+ 2024-03-27 14:10:33,217 INFO MainThread:1482814 [wandb_init.py:init():720] updated telemetry
+ 2024-03-27 14:10:33,272 INFO MainThread:1482814 [wandb_init.py:init():753] communicating run to backend with 90.0 second timeout
+ 2024-03-27 14:10:33,578 INFO MainThread:1482814 [wandb_run.py:_on_init():2254] communicating current version
+ 2024-03-27 14:10:33,602 INFO MainThread:1482814 [wandb_run.py:_on_init():2263] got version response upgrade_message: "wandb version 0.16.5 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
+
+ 2024-03-27 14:10:33,602 INFO MainThread:1482814 [wandb_init.py:init():804] starting run threads in backend
+ 2024-03-27 14:10:34,006 INFO MainThread:1482814 [wandb_run.py:_console_start():2233] atexit reg
+ 2024-03-27 14:10:34,006 INFO MainThread:1482814 [wandb_run.py:_redirect():2088] redirect: wrap_raw
+ 2024-03-27 14:10:34,006 INFO MainThread:1482814 [wandb_run.py:_redirect():2153] Wrapping output streams.
+ 2024-03-27 14:10:34,007 INFO MainThread:1482814 [wandb_run.py:_redirect():2178] Redirects installed.
+ 2024-03-27 14:10:34,007 INFO MainThread:1482814 [wandb_init.py:init():847] run started, returning control to user process
+ 2024-03-27 14:10:34,009 INFO MainThread:1482814 [wandb_run.py:_config_callback():1342] config_cb None None {'vocab_size': 51866, 'num_mel_bins': 128, 'd_model': 1280, 'encoder_layers': 32, 'encoder_attention_heads': 20, 'decoder_layers': 2, 'decoder_attention_heads': 20, 'decoder_ffn_dim': 5120, 'encoder_ffn_dim': 5120, 'dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.0, 'activation_function': 'gelu', 'init_std': 0.02, 'encoder_layerdrop': 0.0, 'decoder_layerdrop': 0.0, 'use_cache': True, 'num_hidden_layers': 32, 'scale_embedding': False, 'max_source_positions': 1500, 'max_target_positions': 448, 'classifier_proj_size': 256, 'use_weighted_layer_sum': False, 'apply_spec_augment': False, 'mask_time_prob': 0.05, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.0, 'mask_feature_length': 10, 'mask_feature_min_masks': 0, 'median_filter_width': 7, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float16', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': True, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 448, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': [220, 50257], 'architectures': ['WhisperForConditionalGeneration'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 50257, 'pad_token_id': 50256, 'eos_token_id': 50257, 'sep_token_id': None, 'decoder_start_token_id': 50258, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'distil-whisper/distil-large-v3', 'transformers_version': '4.40.0.dev0', 'model_type': 'whisper', 'forced_decoder_ids': None, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': True, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 32, 'per_device_eval_batch_size': 32, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 0.0001, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 3.0, 'max_steps': 5000, 'lr_scheduler_type': 'linear', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.0, 'warmup_steps': 500, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': './runs/Mar27_14-10-22_hf-dgx-01', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 25, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 1000, 'save_total_limit': None, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': True, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': 1000, 'dataloader_num_workers': 4, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': './', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'adamw_torch', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'input_length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': False, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None, 'sortish_sampler': False, 'predict_with_generate': True, 'generation_max_length': 225, 'generation_num_beams': None, 'generation_config': None}
wandb/run-20240327_131020-92ximxsk/files/config.yaml ADDED
@@ -0,0 +1,731 @@
+ wandb_version: 1
+
+ _wandb:
+   desc: null
+   value:
+     python_version: 3.8.10
+     cli_version: 0.16.2
+     framework: huggingface
+     huggingface_version: 4.40.0.dev0
+     is_jupyter_run: false
+     is_kaggle_kernel: false
+     start_time: 1711541420.464463
+     t:
+       1:
+       - 1
+       - 2
+       - 3
+       - 5
+       - 11
+       - 12
+       - 49
+       - 51
+       - 53
+       - 55
+       - 71
+       - 98
+       - 100
+       2:
+       - 1
+       - 2
+       - 3
+       - 5
+       - 11
+       - 12
+       - 49
+       - 51
+       - 53
+       - 55
+       - 71
+       - 98
+       - 100
+       3:
+       - 7
+       - 23
+       4: 3.8.10
+       5: 0.16.2
+       6: 4.40.0.dev0
+       8:
+       - 5
+       9:
+         1: transformers_trainer
+       13: linux-x86_64
+       m:
+       - 1: train/global_step
+         6:
+         - 3
+       - 1: train/loss
+         5: 1
+         6:
+         - 1
+       - 1: train/grad_norm
+         5: 1
+         6:
+         - 1
+       - 1: train/learning_rate
+         5: 1
+         6:
+         - 1
+       - 1: train/epoch
+         5: 1
+         6:
+         - 1
+ vocab_size:
+   desc: null
+   value: 51866
+ num_mel_bins:
+   desc: null
+   value: 128
+ d_model:
+   desc: null
+   value: 1280
+ encoder_layers:
+   desc: null
+   value: 32
+ encoder_attention_heads:
+   desc: null
+   value: 20
+ decoder_layers:
+   desc: null
+   value: 2
+ decoder_attention_heads:
+   desc: null
+   value: 20
+ decoder_ffn_dim:
+   desc: null
+   value: 5120
+ encoder_ffn_dim:
+   desc: null
+   value: 5120
+ dropout:
+   desc: null
+   value: 0.0
+ attention_dropout:
+   desc: null
+   value: 0.0
+ activation_dropout:
+   desc: null
+   value: 0.0
+ activation_function:
+   desc: null
+   value: gelu
+ init_std:
+   desc: null
+   value: 0.02
+ encoder_layerdrop:
+   desc: null
+   value: 0.0
+ decoder_layerdrop:
+   desc: null
+   value: 0.0
+ use_cache:
+   desc: null
+   value: true
+ num_hidden_layers:
+   desc: null
+   value: 32
+ scale_embedding:
+   desc: null
+   value: false
+ max_source_positions:
+   desc: null
+   value: 1500
+ max_target_positions:
+   desc: null
+   value: 448
+ classifier_proj_size:
+   desc: null
+   value: 256
+ use_weighted_layer_sum:
+   desc: null
+   value: false
+ apply_spec_augment:
+   desc: null
+   value: false
+ mask_time_prob:
+   desc: null
+   value: 0.05
+ mask_time_length:
+   desc: null
+   value: 10
+ mask_time_min_masks:
+   desc: null
+   value: 2
+ mask_feature_prob:
+   desc: null
+   value: 0.0
+ mask_feature_length:
+   desc: null
+   value: 10
+ mask_feature_min_masks:
+   desc: null
+   value: 0
+ median_filter_width:
+   desc: null
+   value: 7
+ return_dict:
+   desc: null
+   value: true
+ output_hidden_states:
+   desc: null
+   value: false
+ output_attentions:
+   desc: null
+   value: false
+ torchscript:
+   desc: null
+   value: false
+ torch_dtype:
+   desc: null
+   value: float16
+ use_bfloat16:
+   desc: null
+   value: false
+ tf_legacy_loss:
+   desc: null
+   value: false
+ pruned_heads:
+   desc: null
+   value: {}
+ tie_word_embeddings:
+   desc: null
+   value: true
+ chunk_size_feed_forward:
+   desc: null
+   value: 0
+ is_encoder_decoder:
+   desc: null
+   value: true
+ is_decoder:
+   desc: null
+   value: false
+ cross_attention_hidden_size:
+   desc: null
+   value: null
+ add_cross_attention:
+   desc: null
+   value: false
+ tie_encoder_decoder:
+   desc: null
+   value: false
+ max_length:
+   desc: null
+   value: 448
+ min_length:
+   desc: null
+   value: 0
+ do_sample:
+   desc: null
+   value: false
+ early_stopping:
+   desc: null
+   value: false
+ num_beams:
+   desc: null
+   value: 1
+ num_beam_groups:
+   desc: null
+   value: 1
+ diversity_penalty:
+   desc: null
+   value: 0.0
+ temperature:
+   desc: null
+   value: 1.0
+ top_k:
+   desc: null
+   value: 50
+ top_p:
+   desc: null
+   value: 1.0
+ typical_p:
+   desc: null
+   value: 1.0
+ repetition_penalty:
+   desc: null
+   value: 1.0
+ length_penalty:
+   desc: null
+   value: 1.0
+ no_repeat_ngram_size:
+   desc: null
+   value: 0
+ encoder_no_repeat_ngram_size:
+   desc: null
+   value: 0
+ bad_words_ids:
+   desc: null
+   value: null
+ num_return_sequences:
+   desc: null
+   value: 1
+ output_scores:
+   desc: null
+   value: false
+ return_dict_in_generate:
+   desc: null
+   value: false
+ forced_bos_token_id:
+   desc: null
+   value: null
+ forced_eos_token_id:
+   desc: null
+   value: null
+ remove_invalid_values:
+   desc: null
+   value: false
+ exponential_decay_length_penalty:
+   desc: null
+   value: null
+ suppress_tokens:
+   desc: null
+   value: null
+ begin_suppress_tokens:
+   desc: null
+   value:
+   - 220
+   - 50257
+ architectures:
+   desc: null
+   value:
+   - WhisperForConditionalGeneration
+ finetuning_task:
+   desc: null
+   value: null
+ id2label:
+   desc: null
+   value:
+     '0': LABEL_0
+     '1': LABEL_1
+ label2id:
+   desc: null
+   value:
+     LABEL_0: 0
+     LABEL_1: 1
+ tokenizer_class:
+   desc: null
+   value: null
+ prefix:
+   desc: null
+   value: null
+ bos_token_id:
+   desc: null
+   value: 50257
+ pad_token_id:
+   desc: null
+   value: 50256
+ eos_token_id:
+   desc: null
+   value: 50257
+ sep_token_id:
+   desc: null
+   value: null
+ decoder_start_token_id:
+   desc: null
+   value: 50258
+ task_specific_params:
+   desc: null
+   value: null
+ problem_type:
+   desc: null
+   value: null
+ _name_or_path:
+   desc: null
+   value: distil-whisper/distil-large-v3
+ transformers_version:
+   desc: null
+   value: 4.40.0.dev0
+ model_type:
+   desc: null
+   value: whisper
+ forced_decoder_ids:
+   desc: null
+   value: null
+ output_dir:
+   desc: null
+   value: ./
+ overwrite_output_dir:
+   desc: null
+   value: true
+ do_train:
+   desc: null
+   value: true
+ do_eval:
+   desc: null
+   value: true
+ do_predict:
+   desc: null
+   value: false
+ evaluation_strategy:
+   desc: null
+   value: steps
+ prediction_loss_only:
+   desc: null
+   value: false
+ per_device_train_batch_size:
+   desc: null
+   value: 32
+ per_device_eval_batch_size:
+   desc: null
+   value: 32
+ per_gpu_train_batch_size:
+   desc: null
+   value: null
+ per_gpu_eval_batch_size:
+   desc: null
+   value: null
+ gradient_accumulation_steps:
+   desc: null
+   value: 1
+ eval_accumulation_steps:
+   desc: null
+   value: null
+ eval_delay:
+   desc: null
+   value: 0
+ learning_rate:
+   desc: null
+   value: 0.0001
+ weight_decay:
+   desc: null
+   value: 0.0
+ adam_beta1:
+   desc: null
+   value: 0.9
+ adam_beta2:
+   desc: null
+   value: 0.999
+ adam_epsilon:
+   desc: null
+   value: 1.0e-08
+ max_grad_norm:
+   desc: null
+   value: 1.0
+ num_train_epochs:
+   desc: null
+   value: 3.0
+ max_steps:
+   desc: null
+   value: 5000
+ lr_scheduler_type:
+   desc: null
+   value: linear
+ lr_scheduler_kwargs:
+   desc: null
+   value: {}
+ warmup_ratio:
+   desc: null
+   value: 0.0
+ warmup_steps:
+   desc: null
+   value: 500
+ log_level:
+   desc: null
+   value: passive
+ log_level_replica:
+   desc: null
+   value: warning
+ log_on_each_node:
+   desc: null
+   value: true
+ logging_dir:
+   desc: null
+   value: ./runs/Mar27_13-10-05_hf-dgx-01
+ logging_strategy:
+   desc: null
+   value: steps
+ logging_first_step:
+   desc: null
+   value: false
+ logging_steps:
+   desc: null
+   value: 25
+ logging_nan_inf_filter:
+   desc: null
+   value: true
+ save_strategy:
+   desc: null
+   value: steps
+ save_steps:
+   desc: null
+   value: 1000
+ save_total_limit:
+   desc: null
+   value: null
+ save_safetensors:
+   desc: null
+   value: true
+ save_on_each_node:
+   desc: null
+   value: false
+ save_only_model:
+   desc: null
+   value: false
+ no_cuda:
+   desc: null
+   value: false
+ use_cpu:
+   desc: null
+   value: false
+ use_mps_device:
+   desc: null
+   value: false
+ seed:
+   desc: null
+   value: 42
+ data_seed:
+   desc: null
+   value: null
+ jit_mode_eval:
+   desc: null
+   value: false
+ use_ipex:
+   desc: null
+   value: false
+ bf16:
+   desc: null
+   value: false
+ fp16:
+   desc: null
+   value: true
+ fp16_opt_level:
+   desc: null
+   value: O1
+ half_precision_backend:
+   desc: null
+   value: auto
+ bf16_full_eval:
+   desc: null
+   value: false
+ fp16_full_eval:
+   desc: null
+   value: false
+ tf32:
+   desc: null
+   value: null
+ local_rank:
+   desc: null
+   value: 0
+ ddp_backend:
+   desc: null
+   value: null
+ tpu_num_cores:
+   desc: null
+   value: null
+ tpu_metrics_debug:
+   desc: null
+   value: false
+ debug:
+   desc: null
+   value: []
+ dataloader_drop_last:
+   desc: null
+   value: false
+ eval_steps:
+   desc: null
+   value: 1000
+ dataloader_num_workers:
+   desc: null
+   value: 4
+ dataloader_prefetch_factor:
+   desc: null
+   value: null
+ past_index:
+   desc: null
+   value: -1
+ run_name:
+   desc: null
+   value: ./
+ disable_tqdm:
+   desc: null
+   value: false
+ remove_unused_columns:
+   desc: null
+   value: true
+ label_names:
+   desc: null
+   value: null
+ load_best_model_at_end:
+   desc: null
+   value: false
+ metric_for_best_model:
+   desc: null
+   value: null
+ greater_is_better:
+   desc: null
+   value: null
+ ignore_data_skip:
+   desc: null
+   value: false
+ fsdp:
+   desc: null
+   value: []
+ fsdp_min_num_params:
+   desc: null
+   value: 0
+ fsdp_config:
+   desc: null
+   value:
+     min_num_params: 0
+     xla: false
+     xla_fsdp_v2: false
+     xla_fsdp_grad_ckpt: false
+ fsdp_transformer_layer_cls_to_wrap:
+   desc: null
+   value: null
+ accelerator_config:
+   desc: null
+   value:
+     split_batches: false
+     dispatch_batches: null
+     even_batches: true
+     use_seedable_sampler: true
+ deepspeed:
+   desc: null
+   value: null
+ label_smoothing_factor:
+   desc: null
+   value: 0.0
+ optim:
+   desc: null
+   value: adamw_torch
+ optim_args:
+   desc: null
+   value: null
+ adafactor:
+   desc: null
+   value: false
+ group_by_length:
+   desc: null
+   value: false
+ length_column_name:
+   desc: null
+   value: input_length
+ report_to:
+   desc: null
+   value:
+   - tensorboard
+   - wandb
+ ddp_find_unused_parameters:
+   desc: null
+   value: null
+ ddp_bucket_cap_mb:
+   desc: null
+   value: null
+ ddp_broadcast_buffers:
+   desc: null
+   value: null
+ dataloader_pin_memory:
+   desc: null
+   value: true
+ dataloader_persistent_workers:
+   desc: null
+   value: false
+ skip_memory_metrics:
+   desc: null
+   value: true
+ use_legacy_prediction_loop:
+   desc: null
+   value: false
+ push_to_hub:
+   desc: null
+   value: true
+ resume_from_checkpoint:
+   desc: null
+   value: null
+ hub_model_id:
+   desc: null
+   value: null
+ hub_strategy:
+   desc: null
+   value: every_save
+ hub_token:
+   desc: null
+   value: <HUB_TOKEN>
+ hub_private_repo:
+   desc: null
+   value: false
+ hub_always_push:
+   desc: null
+   value: false
+ gradient_checkpointing:
+   desc: null
+   value: true
+ gradient_checkpointing_kwargs:
+   desc: null
+   value: null
+ include_inputs_for_metrics:
+   desc: null
+   value: false
+ fp16_backend:
+   desc: null
+   value: auto
+ push_to_hub_model_id:
+   desc: null
+   value: null
+ push_to_hub_organization:
+   desc: null
+   value: null
+ push_to_hub_token:
+   desc: null
+   value: <PUSH_TO_HUB_TOKEN>
+ mp_parameters:
+   desc: null
+   value: ''
+ auto_find_batch_size:
+   desc: null
+   value: false
+ full_determinism:
+   desc: null
+   value: false
+ torchdynamo:
+   desc: null
+   value: null
+ ray_scope:
+   desc: null
+   value: last
+ ddp_timeout:
+   desc: null
+   value: 1800
+ torch_compile:
+   desc: null
+   value: false
+ torch_compile_backend:
+   desc: null
+   value: null
+ torch_compile_mode:
+   desc: null
+   value: null
+ dispatch_batches:
+   desc: null
+   value: null
+ split_batches:
+   desc: null
+   value: null
+ include_tokens_per_second:
+   desc: null
+   value: false
+ include_num_input_tokens_seen:
+   desc: null
+   value: false
+ neftune_noise_alpha:
+   desc: null
+   value: null
+ optim_target_modules:
+   desc: null
+   value: null
+ sortish_sampler:
+   desc: null
+   value: false
+ predict_with_generate:
+   desc: null
+   value: true
+ generation_max_length:
+   desc: null
+   value: 225
+ generation_num_beams:
+   desc: null
+   value: null
+ generation_config:
+   desc: null
+   value: null
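The config documents a linear learning-rate schedule: warmup from 0 to the 1e-4 peak over 500 steps, then linear decay to 0 at step 5000. A minimal sketch of the rate at a given step, mirroring transformers' get_linear_schedule_with_warmup:

```python
# Effective learning rate implied by this run's config (assumed values from above).
def lr_at_step(step: int, peak_lr: float = 1e-4, warmup_steps: int = 500, max_steps: int = 5000) -> float:
    if step < warmup_steps:
        return peak_lr * step / warmup_steps  # linear warmup
    return peak_lr * max(0.0, (max_steps - step) / (max_steps - warmup_steps))  # linear decay

for step in (0, 250, 500, 2750, 5000):
    print(step, lr_at_step(step))
# 0 -> 0.0, 250 -> 5e-05, 500 -> 1e-04, 2750 -> 5e-05, 5000 -> 0.0
```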
wandb/run-20240327_131020-92ximxsk/files/output.log ADDED
@@ -0,0 +1,896 @@
+
+ 0%| | 0/5000 [00:00<?, ?it/s]/home/sanchit/hf/lib/python3.8/site-packages/torch/utils/checkpoint.py:460: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
+ warnings.warn(
+ /home/sanchit/hf/lib/python3.8/site-packages/torch/utils/checkpoint.py:90: UserWarning: None of the inputs have requires_grad=True. Gradients will be None
+ warnings.warn(
+ [WARNING|logging.py:329] 2024-03-27 13:10:35,109 >> `use_cache = True` is incompatible with gradient checkpointing. Setting `use_cache = False`...
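
The checkpoint warnings above come from gradient checkpointing being enabled without an explicit `use_reentrant` choice. A minimal sketch of how the flag can be forwarded through the `gradient_checkpointing_kwargs` training argument (visible as unset in the config above); this is an illustration, not something this run did:

    from transformers import Seq2SeqTrainingArguments

    # Forwarding use_reentrant=False to torch.utils.checkpoint silences the
    # UserWarning and opts into the recommended non-reentrant variant.
    training_args = Seq2SeqTrainingArguments(
        output_dir="./",
        gradient_checkpointing=True,
        gradient_checkpointing_kwargs={"use_reentrant": False},
    )
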
+ 0%|▍ | 25/5000 [00:54<2:16:50, 1.65s/it]
+ 1%|▊ | 50/5000 [01:35<2:16:12, 1.65s/it]
+ 2%|█▏ | 75/5000 [02:17<2:16:13, 1.66s/it]
+ 2%|█▌ | 99/5000 [02:56<2:15:03, 1.65s/it]
+ 2%|█▉ | 125/5000 [03:40<2:14:32, 1.66s/it]
+ 3%|██▎ | 150/5000 [04:21<2:13:41, 1.65s/it]
+ 3%|██▋ | 174/5000 [05:01<2:13:07, 1.66s/it]
+ 4%|███ | 200/5000 [05:44<2:12:21, 1.65s/it]
+ 4%|███▌ | 225/5000 [06:37<4:27:11, 3.36s/it]
+ 5%|███▉ | 251/5000 [07:20<2:11:03, 1.66s/it]
+ 6%|████▎ | 275/5000 [08:00<2:10:01, 1.65s/it]
+ 6%|████▋ | 300/5000 [08:41<2:10:03, 1.66s/it]
+ 6%|█████ | 324/5000 [09:21<2:09:00, 1.66s/it]
+ 7%|█████▍ | 350/5000 [10:04<2:08:20, 1.66s/it]
+ 8%|█████▊ | 375/5000 [10:46<2:07:38, 1.66s/it]
+ 8%|██████▏ | 400/5000 [11:27<2:06:46, 1.65s/it]
+ 8%|██████▋ | 425/5000 [12:08<2:06:11, 1.65s/it]
+ 9%|███████ | 450/5000 [13:02<2:54:02, 2.30s/it]
+ 9%|███████▍ | 474/5000 [13:41<2:03:05, 1.63s/it]
+ 10%|███████▊ | 500/5000 [14:24<2:02:02, 1.63s/it]
+ 10%|████████▏ | 525/5000 [15:05<2:03:53, 1.66s/it]
+ 11%|████████▌ | 550/5000 [15:47<2:02:50, 1.66s/it]
+ 12%|████████▉ | 575/5000 [16:28<2:01:48, 1.65s/it]
+ 12%|█████████▍ | 601/5000 [17:11<2:01:44, 1.66s/it]
+ 12%|█████████▊ | 625/5000 [17:51<2:00:52, 1.66s/it]
+ 13%|██████████▏ | 650/5000 [18:32<2:00:13, 1.66s/it]
+ 14%|██████████▌ | 675/5000 [19:18<2:11:57, 1.83s/it]
+ 14%|██████████▉ | 701/5000 [20:01<1:58:29, 1.65s/it]
+ 14%|███████████▎ | 725/5000 [20:41<1:58:14, 1.66s/it]
+ 15%|███████████▋ | 750/5000 [21:22<1:57:12, 1.65s/it]
+ 16%|████████████ | 775/5000 [22:04<1:56:30, 1.65s/it]
+ 16%|████████████▍ | 800/5000 [22:45<1:55:46, 1.65s/it]
+ 16%|████████████▊ | 825/5000 [23:26<1:55:21, 1.66s/it]
+ 17%|█████████████▎ | 850/5000 [24:08<1:54:22, 1.65s/it]
+ 18%|█████████████▋ | 875/5000 [24:49<1:53:55, 1.66s/it]
+ 18%|██████████████ | 899/5000 [25:41<2:00:47, 1.77s/it]
+ 18%|██████████████▍ | 924/5000 [26:22<1:52:22, 1.65s/it]
+ 19%|██████████████▊ | 949/5000 [27:03<1:51:39, 1.65s/it]
+ 19%|███████████████▏ | 974/5000 [27:45<1:51:20, 1.66s/it]
+ 20%|███████████████▌ | 999/5000 [28:26<1:50:30, 1.66s/it]
+ 20%|███████████████▍ | 1000/5000 [28:28<1:46:27, 1.60s/it][INFO|trainer.py:768] 2024-03-27 13:38:49,563 >> The following columns in the evaluation set don't have a corresponding argument in `WhisperForConditionalGeneration.forward` and have been ignored: input_length. If input_length are not expected by `WhisperForConditionalGeneration.forward`, you can safely ignore this message.
+ [INFO|trainer.py:3515] 2024-03-27 13:38:49,566 >> ***** Running Evaluation *****
+ [INFO|trainer.py:3517] 2024-03-27 13:38:49,566 >> Num examples = 3123
+ [INFO|trainer.py:3520] 2024-03-27 13:38:49,566 >> Batch size = 32
+ [INFO|generation_whisper.py:1111] 2024-03-27 13:39:01,038 >> You have passed task=50360, but also have set `forced_decoder_ids` to [[1, None], [2, 50360]] which creates a conflict. `forced_decoder_ids` will be ignored in favor of task=50360.
+ Traceback (most recent call last):
+ File "run_speech_recognition_seq2seq.py", line 627, in <module>
+ main()
+ File "run_speech_recognition_seq2seq.py", line 577, in main
+ train_result = trainer.train(resume_from_checkpoint=checkpoint)
+ File "/home/sanchit/transformers/src/transformers/trainer.py", line 1774, in train
+ return inner_training_loop(
+ File "/home/sanchit/transformers/src/transformers/trainer.py", line 2196, in _inner_training_loop
+ self._maybe_log_save_evaluate(tr_loss, grad_norm, model, trial, epoch, ignore_keys_for_eval)
+ File "/home/sanchit/transformers/src/transformers/trainer.py", line 2580, in _maybe_log_save_evaluate
+ metrics = self.evaluate(ignore_keys=ignore_keys_for_eval)
+ File "/home/sanchit/transformers/src/transformers/trainer_seq2seq.py", line 180, in evaluate
+ return super().evaluate(eval_dataset, ignore_keys=ignore_keys, metric_key_prefix=metric_key_prefix)
+ File "/home/sanchit/transformers/src/transformers/trainer.py", line 3368, in evaluate
+ output = eval_loop(
+ File "/home/sanchit/transformers/src/transformers/trainer.py", line 3557, in evaluation_loop
+ loss, logits, labels = self.prediction_step(model, inputs, prediction_loss_only, ignore_keys=ignore_keys)
+ File "/home/sanchit/transformers/src/transformers/trainer_seq2seq.py", line 310, in prediction_step
+ generated_tokens = self.model.generate(**generation_inputs, **gen_kwargs)
+ File "/home/sanchit/transformers/src/transformers/models/whisper/generation_whisper.py", line 534, in generate
+ init_tokens = self._retrieve_init_tokens(
+ File "/home/sanchit/transformers/src/transformers/models/whisper/generation_whisper.py", line 1146, in _retrieve_init_tokens
+ is_language_code = len(language) == 2
+ TypeError: object of type 'int' has no len()
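
The run dies at the step-1000 evaluation because `_retrieve_init_tokens` receives the language as an integer token id rather than a string such as "hi" or "hindi", so `len(language)` raises. A minimal sketch of the failure mode and a type guard that would sidestep it; the guard is illustrative only, not the fix that was later applied in transformers:

    # The int below is a hypothetical token id, for illustration only; in the
    # failing call the language should have been a string like "hi".
    language = 50259
    try:
        is_language_code = len(language) == 2  # TypeError: 'int' has no len()
    except TypeError:
        is_language_code = False
    # Checking the type first keeps string inputs working and skips ints:
    is_language_code = isinstance(language, str) and len(language) == 2
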
wandb/run-20240327_131020-92ximxsk/files/requirements.txt ADDED
@@ -0,0 +1,247 @@
+ absl-py==2.1.0
+ accelerate==0.27.2
+ aiohttp==3.9.3
+ aiosignal==1.3.1
+ anyio==4.2.0
+ appdirs==1.4.4
+ argon2-cffi-bindings==21.2.0
+ argon2-cffi==23.1.0
+ arrow==1.3.0
+ asttokens==2.4.1
+ astunparse==1.6.3
+ async-lru==2.0.4
+ async-timeout==4.0.3
+ attrs==23.2.0
+ audioread==3.0.1
+ av==11.0.0
+ babel==2.14.0
+ backcall==0.2.0
+ beautifulsoup4==4.12.3
+ bitsandbytes==0.42.0
+ bleach==6.1.0
+ cached-property==1.5.2
+ cachetools==5.3.2
+ certifi==2024.2.2
+ cffi==1.16.0
+ charset-normalizer==3.3.2
+ chex==0.1.7
+ click==8.1.7
+ coloredlogs==15.0.1
+ comm==0.2.1
+ contourpy==1.1.1
+ ctranslate2==4.1.0
+ cycler==0.12.1
+ datasets==2.18.0
+ debugpy==1.8.0
+ decorator==5.1.1
+ defusedxml==0.7.1
+ dill==0.3.7
+ dm-tree==0.1.8
+ docker-pycreds==0.4.0
+ docstring-parser==0.15
+ einops==0.7.0
+ etils==1.3.0
+ evaluate==0.4.1
+ exceptiongroup==1.2.0
+ executing==2.0.1
+ faster-whisper==1.0.1
+ fastjsonschema==2.19.1
+ filelock==3.13.1
+ flash-attn==2.5.3
+ flatbuffers==23.5.26
+ flax==0.7.2
+ fonttools==4.48.1
+ fqdn==1.5.1
+ frozenlist==1.4.1
+ fsspec==2024.2.0
+ gast==0.4.0
+ gitdb==4.0.11
+ gitpython==3.1.41
+ google-auth-oauthlib==1.0.0
+ google-auth==2.27.0
+ google-pasta==0.2.0
+ grpcio==1.60.1
+ h11==0.14.0
+ h5py==3.10.0
+ httpcore==1.0.2
+ httpx==0.26.0
+ huggingface-hub==0.21.4
+ humanfriendly==10.0
+ idna==3.6
+ importlib-metadata==7.0.1
+ importlib-resources==6.1.1
+ iniconfig==2.0.0
+ ipdb==0.13.13
+ ipykernel==6.29.2
+ ipython==8.12.3
+ isoduration==20.11.0
+ jax==0.4.13
+ jaxlib==0.4.13
+ jedi==0.19.1
+ jinja2==3.1.2
+ jiwer==3.0.3
+ joblib==1.3.2
+ json5==0.9.14
+ jsonpointer==2.4
+ jsonschema-specifications==2023.12.1
+ jsonschema==4.21.1
+ jupyter-client==8.6.0
+ jupyter-core==5.7.1
+ jupyter-events==0.9.0
+ jupyter-lsp==2.2.2
+ jupyter-server-terminals==0.5.2
+ jupyter-server==2.12.5
+ jupyterlab-pygments==0.3.0
+ jupyterlab-server==2.25.2
+ jupyterlab==4.1.0
+ keras==2.13.1
+ kiwisolver==1.4.5
+ lazy-loader==0.3
+ libclang==16.0.6
+ librosa==0.10.1
+ llvmlite==0.41.1
+ markdown-it-py==3.0.0
+ markdown==3.5.2
+ markupsafe==2.1.3
+ matplotlib-inline==0.1.6
+ matplotlib==3.7.4
+ mdurl==0.1.2
+ mistune==3.0.2
+ ml-dtypes==0.2.0
+ more-itertools==10.2.0
+ mpmath==1.2.1
+ msclap==1.3.3
+ msgpack==1.0.7
+ multidict==6.0.5
+ multiprocess==0.70.15
+ nbclient==0.9.0
+ nbconvert==7.16.0
+ nbformat==5.9.2
+ nest-asyncio==1.6.0
+ networkx==3.0rc1
+ ninja==1.11.1.1
+ notebook-shim==0.2.3
+ numba==0.58.1
+ numpy==1.24.3
+ nvidia-cublas-cu12==12.1.3.1
+ nvidia-cuda-cupti-cu12==12.1.105
+ nvidia-cuda-nvrtc-cu12==12.1.105
+ nvidia-cuda-runtime-cu12==12.1.105
+ nvidia-cudnn-cu12==8.9.2.26
+ nvidia-cufft-cu12==11.0.2.54
+ nvidia-curand-cu12==10.3.2.106
+ nvidia-cusolver-cu12==11.4.5.107
+ nvidia-cusparse-cu12==12.1.0.106
+ nvidia-nccl-cu12==2.19.3
+ nvidia-nvjitlink-cu12==12.1.105
+ nvidia-nvtx-cu12==12.1.105
+ oauthlib==3.2.2
+ onnxruntime==1.17.1
+ openai-whisper==20231117
+ opt-einsum==3.3.0
+ optax==0.1.8
+ orbax-checkpoint==0.2.3
+ overrides==7.7.0
+ packaging==23.2
+ pandas==2.0.3
+ pandocfilters==1.5.1
+ parameterized==0.9.0
+ parso==0.8.3
+ peft==0.8.2
+ pexpect==4.9.0
+ pickleshare==0.7.5
+ pillow==9.3.0
+ pip==24.0
+ pkg-resources==0.0.0
+ pkgutil-resolve-name==1.3.10
+ platformdirs==4.2.0
+ pluggy==1.4.0
+ pooch==1.8.0
+ prometheus-client==0.19.0
+ prompt-toolkit==3.0.43
+ protobuf==4.25.2
+ psutil==5.9.8
+ ptyprocess==0.7.0
+ pure-eval==0.2.2
+ pyarrow-hotfix==0.6
+ pyarrow==15.0.0
+ pyasn1-modules==0.3.0
+ pyasn1==0.5.1
+ pycparser==2.21
+ pygments==2.17.2
+ pyparsing==3.1.1
+ pytest==7.4.4
+ python-dateutil==2.8.2
+ python-json-logger==2.0.7
+ pytorch-triton==3.0.0+901819d2b6
+ pytz==2024.1
+ pyyaml==6.0.1
+ pyzmq==25.1.2
+ rapidfuzz==3.6.1
+ referencing==0.33.0
+ regex==2023.12.25
+ requests-oauthlib==1.3.1
+ requests==2.31.0
+ responses==0.18.0
+ rfc3339-validator==0.1.4
+ rfc3986-validator==0.1.1
+ rich==13.7.0
+ rpds-py==0.17.1
+ rsa==4.9
+ safetensors==0.4.2
+ scikit-learn==1.3.2
+ scipy==1.10.1
+ send2trash==1.8.2
+ sentry-sdk==1.40.0
+ setproctitle==1.3.3
+ setuptools==44.0.0
+ shtab==1.7.0
+ six==1.16.0
+ smmap==5.0.1
+ sniffio==1.3.0
+ soundfile==0.12.1
+ soupsieve==2.5
+ soxr==0.3.7
+ stack-data==0.6.3
+ sympy==1.11.1
+ tensorboard-data-server==0.7.2
+ tensorboard==2.13.0
+ tensorflow-cpu==2.13.1
+ tensorflow-estimator==2.13.0
+ tensorflow-io-gcs-filesystem==0.34.0
+ tensorstore==0.1.45
+ termcolor==2.4.0
+ terminado==0.18.0
+ threadpoolctl==3.2.0
+ tiktoken==0.6.0
+ tinycss2==1.2.1
+ tokenizers==0.15.1
+ tomli==2.0.1
+ toolz==0.12.1
+ torch==2.2.1
+ torchaudio==2.2.1
+ torchlibrosa==0.1.0
+ torchvision==0.17.1
+ tornado==6.4
+ tqdm==4.66.1
+ traitlets==5.14.1
+ transformers==4.39.0.dev0
+ triton==2.2.0
+ trl==0.7.11
+ types-python-dateutil==2.8.19.20240106
+ typing-extensions==4.9.0
+ tyro==0.7.3
+ tzdata==2023.4
+ uri-template==1.3.0
+ urllib3==2.2.0
+ wandb==0.16.2
+ wcwidth==0.2.13
+ webcolors==1.13
+ webencodings==0.5.1
+ websocket-client==1.7.0
+ werkzeug==3.0.1
+ wheel==0.42.0
+ wrapt==1.16.0
+ xxhash==3.4.1
+ yarl==1.9.4
+ zipp==3.17.0
wandb/run-20240327_131020-92ximxsk/files/wandb-metadata.json ADDED
@@ -0,0 +1,738 @@
+ {
+ "os": "Linux-5.4.0-166-generic-x86_64-with-glibc2.29",
+ "python": "3.8.10",
+ "heartbeatAt": "2024-03-27T12:10:21.402102",
+ "startedAt": "2024-03-27T12:10:20.460803",
+ "docker": null,
+ "cuda": null,
+ "args": [
+ "--model_name_or_path=distil-whisper/distil-large-v3",
+ "--dataset_name=mozilla-foundation/common_voice_16_1",
+ "--dataset_config_name=hi",
+ "--language=hindi",
+ "--train_split_name=train+validation",
+ "--eval_split_name=test",
+ "--max_steps=5000",
+ "--output_dir=./",
+ "--per_device_train_batch_size=32",
+ "--per_device_eval_batch_size=32",
+ "--logging_steps=25",
+ "--learning_rate=1e-4",
+ "--warmup_steps=500",
+ "--evaluation_strategy=steps",
+ "--eval_steps=1000",
+ "--save_strategy=steps",
+ "--save_steps=1000",
+ "--generation_max_length=225",
+ "--preprocessing_num_workers=1",
+ "--dataloader_num_workers=4",
+ "--length_column_name=input_length",
+ "--max_duration_in_seconds=30",
+ "--text_column_name=sentence",
+ "--freeze_feature_encoder=False",
+ "--freeze_encoder",
+ "--gradient_checkpointing",
+ "--fp16",
+ "--overwrite_output_dir",
+ "--do_train",
+ "--do_eval",
+ "--predict_with_generate",
+ "--use_auth_token",
+ "--push_to_hub"
+ ],
+ "state": "running",
+ "program": "run_speech_recognition_seq2seq.py",
+ "codePathLocal": "run_speech_recognition_seq2seq.py",
+ "codePath": "run_speech_recognition_seq2seq.py",
+ "git": {
+ "remote": "https://huggingface.co/sanchit-gandhi/distil-large-v3-hi-ft-frozen-encoder",
+ "commit": "e7946df277d73ac75c34c2017b01c6d39e0275cd"
+ },
+ "email": "[email protected]",
+ "root": "/home/sanchit/distil-large-v3-hi-ft-frozen-encoder",
+ "host": "hf-dgx-01",
+ "username": "sanchit",
+ "executable": "/home/sanchit/hf/bin/python",
+ "cpu_count": 64,
+ "cpu_count_logical": 128,
+ "cpu_freq": {
+ "current": 2091.9220156250008,
+ "min": 1500.0,
+ "max": 2250.0
+ },
+ "cpu_freq_per_core": [
+ {"current": 2062.097, "min": 1500.0, "max": 2250.0},
+ {"current": 2724.274, "min": 1500.0, "max": 2250.0},
+ {"current": 1667.938, "min": 1500.0, "max": 2250.0},
+ {"current": 1666.284, "min": 1500.0, "max": 2250.0},
+ {"current": 3355.761, "min": 1500.0, "max": 2250.0},
+ {"current": 1666.795, "min": 1500.0, "max": 2250.0},
+ {"current": 2256.069, "min": 1500.0, "max": 2250.0},
+ {"current": 3304.206, "min": 1500.0, "max": 2250.0},
+ {"current": 3305.275, "min": 1500.0, "max": 2250.0},
+ {"current": 1842.442, "min": 1500.0, "max": 2250.0},
+ {"current": 1670.049, "min": 1500.0, "max": 2250.0},
+ {"current": 1666.007, "min": 1500.0, "max": 2250.0},
+ {"current": 1717.327, "min": 1500.0, "max": 2250.0},
+ {"current": 1712.391, "min": 1500.0, "max": 2250.0},
+ {"current": 3243.458, "min": 1500.0, "max": 2250.0},
+ {"current": 1715.474, "min": 1500.0, "max": 2250.0},
+ {"current": 2376.574, "min": 1500.0, "max": 2250.0},
+ {"current": 1791.608, "min": 1500.0, "max": 2250.0},
+ {"current": 1797.039, "min": 1500.0, "max": 2250.0},
+ {"current": 2381.749, "min": 1500.0, "max": 2250.0},
+ {"current": 1666.779, "min": 1500.0, "max": 2250.0},
+ {"current": 2507.43, "min": 1500.0, "max": 2250.0},
+ {"current": 1942.591, "min": 1500.0, "max": 2250.0},
+ {"current": 1834.474, "min": 1500.0, "max": 2250.0},
+ {"current": 1712.053, "min": 1500.0, "max": 2250.0},
+ {"current": 2317.73, "min": 1500.0, "max": 2250.0},
+ {"current": 1716.361, "min": 1500.0, "max": 2250.0},
+ {"current": 1712.404, "min": 1500.0, "max": 2250.0},
+ {"current": 1668.548, "min": 1500.0, "max": 2250.0},
+ {"current": 3353.025, "min": 1500.0, "max": 2250.0},
+ {"current": 1665.849, "min": 1500.0, "max": 2250.0},
+ {"current": 1668.211, "min": 1500.0, "max": 2250.0},
+ {"current": 3337.438, "min": 1500.0, "max": 2250.0},
+ {"current": 1858.071, "min": 1500.0, "max": 2250.0},
+ {"current": 1666.588, "min": 1500.0, "max": 2250.0},
+ {"current": 1943.458, "min": 1500.0, "max": 2250.0},
+ {"current": 1720.87, "min": 1500.0, "max": 2250.0},
+ {"current": 3308.528, "min": 1500.0, "max": 2250.0},
+ {"current": 1667.175, "min": 1500.0, "max": 2250.0},
+ {"current": 1668.532, "min": 1500.0, "max": 2250.0},
+ {"current": 1666.575, "min": 1500.0, "max": 2250.0},
+ {"current": 3309.308, "min": 1500.0, "max": 2250.0},
+ {"current": 1892.585, "min": 1500.0, "max": 2250.0},
+ {"current": 1667.387, "min": 1500.0, "max": 2250.0},
+ {"current": 1666.733, "min": 1500.0, "max": 2250.0},
+ {"current": 2995.567, "min": 1500.0, "max": 2250.0},
+ {"current": 1666.584, "min": 1500.0, "max": 2250.0},
+ {"current": 1670.102, "min": 1500.0, "max": 2250.0},
+ {"current": 2328.591, "min": 1500.0, "max": 2250.0},
+ {"current": 1723.97, "min": 1500.0, "max": 2250.0},
+ {"current": 1720.779, "min": 1500.0, "max": 2250.0},
+ {"current": 1721.795, "min": 1500.0, "max": 2250.0},
+ {"current": 1793.58, "min": 1500.0, "max": 2250.0},
+ {"current": 1792.09, "min": 1500.0, "max": 2250.0},
+ {"current": 1737.38, "min": 1500.0, "max": 2250.0},
+ {"current": 2008.131, "min": 1500.0, "max": 2250.0},
+ {"current": 1592.265, "min": 1500.0, "max": 2250.0},
+ {"current": 3035.635, "min": 1500.0, "max": 2250.0},
+ {"current": 2202.208, "min": 1500.0, "max": 2250.0},
+ {"current": 1585.533, "min": 1500.0, "max": 2250.0},
+ {"current": 1657.34, "min": 1500.0, "max": 2250.0},
+ {"current": 1653.964, "min": 1500.0, "max": 2250.0},
+ {"current": 1664.47, "min": 1500.0, "max": 2250.0},
+ {"current": 1790.248, "min": 1500.0, "max": 2250.0},
+ {"current": 2982.991, "min": 1500.0, "max": 2250.0},
+ {"current": 3002.847, "min": 1500.0, "max": 2250.0},
+ {"current": 3339.387, "min": 1500.0, "max": 2250.0},
+ {"current": 3001.23, "min": 1500.0, "max": 2250.0},
+ {"current": 3053.061, "min": 1500.0, "max": 2250.0},
+ {"current": 3315.226, "min": 1500.0, "max": 2250.0},
+ {"current": 3061.467, "min": 1500.0, "max": 2250.0},
+ {"current": 3065.347, "min": 1500.0, "max": 2250.0},
+ {"current": 3021.985, "min": 1500.0, "max": 2250.0},
+ {"current": 1983.756, "min": 1500.0, "max": 2250.0},
+ {"current": 1969.251, "min": 1500.0, "max": 2250.0},
+ {"current": 2133.598, "min": 1500.0, "max": 2250.0},
+ {"current": 2195.97, "min": 1500.0, "max": 2250.0},
+ {"current": 2001.765, "min": 1500.0, "max": 2250.0},
+ {"current": 2468.943, "min": 1500.0, "max": 2250.0},
+ {"current": 1767.221, "min": 1500.0, "max": 2250.0},
+ {"current": 2275.561, "min": 1500.0, "max": 2250.0},
+ {"current": 2285.99, "min": 1500.0, "max": 2250.0},
+ {"current": 2259.713, "min": 1500.0, "max": 2250.0},
+ {"current": 2268.924, "min": 1500.0, "max": 2250.0},
+ {"current": 2181.503, "min": 1500.0, "max": 2250.0},
+ {"current": 1972.534, "min": 1500.0, "max": 2250.0},
+ {"current": 1800.356, "min": 1500.0, "max": 2250.0},
+ {"current": 2156.703, "min": 1500.0, "max": 2250.0},
+ {"current": 1666.793, "min": 1500.0, "max": 2250.0},
+ {"current": 2197.697, "min": 1500.0, "max": 2250.0},
+ {"current": 1665.176, "min": 1500.0, "max": 2250.0},
+ {"current": 1668.1, "min": 1500.0, "max": 2250.0},
+ {"current": 1663.848, "min": 1500.0, "max": 2250.0},
+ {"current": 2848.406, "min": 1500.0, "max": 2250.0},
+ {"current": 1664.222, "min": 1500.0, "max": 2250.0},
+ {"current": 1664.261, "min": 1500.0, "max": 2250.0},
+ {"current": 3333.904, "min": 1500.0, "max": 2250.0},
+ {"current": 3262.812, "min": 1500.0, "max": 2250.0},
+ {"current": 1663.839, "min": 1500.0, "max": 2250.0},
+ {"current": 2200.178, "min": 1500.0, "max": 2250.0},
+ {"current": 2195.77, "min": 1500.0, "max": 2250.0},
+ {"current": 3339.11, "min": 1500.0, "max": 2250.0},
+ {"current": 1665.556, "min": 1500.0, "max": 2250.0},
+ {"current": 1664.719, "min": 1500.0, "max": 2250.0},
+ {"current": 1664.414, "min": 1500.0, "max": 2250.0},
+ {"current": 3333.012, "min": 1500.0, "max": 2250.0},
+ {"current": 2105.724, "min": 1500.0, "max": 2250.0},
+ {"current": 1666.14, "min": 1500.0, "max": 2250.0},
+ {"current": 1664.983, "min": 1500.0, "max": 2250.0},
+ {"current": 3290.213, "min": 1500.0, "max": 2250.0},
+ {"current": 1661.174, "min": 1500.0, "max": 2250.0},
+ {"current": 1663.383, "min": 1500.0, "max": 2250.0},
+ {"current": 2254.795, "min": 1500.0, "max": 2250.0},
+ {"current": 1659.446, "min": 1500.0, "max": 2250.0},
+ {"current": 2183.105, "min": 1500.0, "max": 2250.0},
+ {"current": 1840.586, "min": 1500.0, "max": 2250.0},
+ {"current": 1715.747, "min": 1500.0, "max": 2250.0},
+ {"current": 1557.125, "min": 1500.0, "max": 2250.0},
+ {"current": 1690.758, "min": 1500.0, "max": 2250.0},
+ {"current": 2221.649, "min": 1500.0, "max": 2250.0},
+ {"current": 2104.639, "min": 1500.0, "max": 2250.0},
+ {"current": 2933.737, "min": 1500.0, "max": 2250.0},
+ {"current": 2071.918, "min": 1500.0, "max": 2250.0},
+ {"current": 2152.249, "min": 1500.0, "max": 2250.0},
+ {"current": 1610.487, "min": 1500.0, "max": 2250.0},
+ {"current": 1787.196, "min": 1500.0, "max": 2250.0},
+ {"current": 1629.086, "min": 1500.0, "max": 2250.0},
+ {"current": 1648.114, "min": 1500.0, "max": 2250.0}
+ ],
+ "disk": {
+ "/": {
+ "total": 1757.8785285949707,
+ "used": 1497.0410041809082
+ }
+ },
+ "gpu": "NVIDIA A100-SXM4-80GB",
+ "gpu_count": 5,
+ "gpu_devices": [
+ {"name": "NVIDIA A100-SXM4-80GB", "memory_total": 85899345920},
+ {"name": "NVIDIA A100-SXM4-80GB", "memory_total": 85899345920},
+ {"name": "NVIDIA A100-SXM4-80GB", "memory_total": 85899345920},
+ {"name": "NVIDIA DGX Display", "memory_total": 4294967296},
+ {"name": "NVIDIA A100-SXM4-80GB", "memory_total": 85899345920}
+ ],
+ "memory": {
+ "total": 503.5396919250488
+ }
+ }
wandb/run-20240327_131020-92ximxsk/files/wandb-summary.json ADDED
@@ -0,0 +1 @@
+ {"train/loss": 0.1035, "train/grad_norm": 1.0489201545715332, "train/learning_rate": 8.900000000000001e-05, "train/epoch": 4.5, "train/global_step": 1000, "_timestamp": 1711543129.5625114, "_runtime": 1709.0980484485626, "_step": 39, "_wandb": {"runtime": 1726}}
wandb/run-20240327_131020-92ximxsk/logs/debug-internal.log ADDED
The diff for this file is too large to render.
 
wandb/run-20240327_131020-92ximxsk/logs/debug.log ADDED
@@ -0,0 +1,29 @@
+ 2024-03-27 13:10:20,462 INFO MainThread:1400507 [wandb_setup.py:_flush():76] Current SDK version is 0.16.2
+ 2024-03-27 13:10:20,462 INFO MainThread:1400507 [wandb_setup.py:_flush():76] Configure stats pid to 1400507
+ 2024-03-27 13:10:20,462 INFO MainThread:1400507 [wandb_setup.py:_flush():76] Loading settings from /home/sanchit/.config/wandb/settings
+ 2024-03-27 13:10:20,462 INFO MainThread:1400507 [wandb_setup.py:_flush():76] Loading settings from /home/sanchit/distil-large-v3-hi-ft-frozen-encoder/wandb/settings
+ 2024-03-27 13:10:20,462 INFO MainThread:1400507 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
+ 2024-03-27 13:10:20,462 INFO MainThread:1400507 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
+ 2024-03-27 13:10:20,462 INFO MainThread:1400507 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program_relpath': 'run_speech_recognition_seq2seq.py', 'program_abspath': '/home/sanchit/distil-large-v3-hi-ft-frozen-encoder/run_speech_recognition_seq2seq.py', 'program': 'run_speech_recognition_seq2seq.py'}
+ 2024-03-27 13:10:20,462 INFO MainThread:1400507 [wandb_init.py:_log_setup():526] Logging user logs to /home/sanchit/distil-large-v3-hi-ft-frozen-encoder/wandb/run-20240327_131020-92ximxsk/logs/debug.log
+ 2024-03-27 13:10:20,462 INFO MainThread:1400507 [wandb_init.py:_log_setup():527] Logging internal logs to /home/sanchit/distil-large-v3-hi-ft-frozen-encoder/wandb/run-20240327_131020-92ximxsk/logs/debug-internal.log
+ 2024-03-27 13:10:20,462 INFO MainThread:1400507 [wandb_init.py:init():566] calling init triggers
+ 2024-03-27 13:10:20,462 INFO MainThread:1400507 [wandb_init.py:init():573] wandb.init called with sweep_config: {}
+ config: {}
+ 2024-03-27 13:10:20,462 INFO MainThread:1400507 [wandb_init.py:init():616] starting backend
+ 2024-03-27 13:10:20,462 INFO MainThread:1400507 [wandb_init.py:init():620] setting up manager
+ 2024-03-27 13:10:20,463 INFO MainThread:1400507 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
+ 2024-03-27 13:10:20,464 INFO MainThread:1400507 [wandb_init.py:init():628] backend started and connected
+ 2024-03-27 13:10:20,467 INFO MainThread:1400507 [wandb_init.py:init():720] updated telemetry
+ 2024-03-27 13:10:20,537 INFO MainThread:1400507 [wandb_init.py:init():753] communicating run to backend with 90.0 second timeout
+ 2024-03-27 13:10:20,850 INFO MainThread:1400507 [wandb_run.py:_on_init():2254] communicating current version
+ 2024-03-27 13:10:20,878 INFO MainThread:1400507 [wandb_run.py:_on_init():2263] got version response upgrade_message: "wandb version 0.16.5 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
+
+ 2024-03-27 13:10:20,878 INFO MainThread:1400507 [wandb_init.py:init():804] starting run threads in backend
+ 2024-03-27 13:10:21,428 INFO MainThread:1400507 [wandb_run.py:_console_start():2233] atexit reg
+ 2024-03-27 13:10:21,428 INFO MainThread:1400507 [wandb_run.py:_redirect():2088] redirect: wrap_raw
+ 2024-03-27 13:10:21,428 INFO MainThread:1400507 [wandb_run.py:_redirect():2153] Wrapping output streams.
+ 2024-03-27 13:10:21,428 INFO MainThread:1400507 [wandb_run.py:_redirect():2178] Redirects installed.
+ 2024-03-27 13:10:21,429 INFO MainThread:1400507 [wandb_init.py:init():847] run started, returning control to user process
+ 2024-03-27 13:10:21,431 INFO MainThread:1400507 [wandb_run.py:_config_callback():1342] config_cb None None {'vocab_size': 51866, 'num_mel_bins': 128, 'd_model': 1280, 'encoder_layers': 32, 'encoder_attention_heads': 20, 'decoder_layers': 2, 'decoder_attention_heads': 20, 'decoder_ffn_dim': 5120, 'encoder_ffn_dim': 5120, 'dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.0, 'activation_function': 'gelu', 'init_std': 0.02, 'encoder_layerdrop': 0.0, 'decoder_layerdrop': 0.0, 'use_cache': True, 'num_hidden_layers': 32, 'scale_embedding': False, 'max_source_positions': 1500, 'max_target_positions': 448, 'classifier_proj_size': 256, 'use_weighted_layer_sum': False, 'apply_spec_augment': False, 'mask_time_prob': 0.05, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.0, 'mask_feature_length': 10, 'mask_feature_min_masks': 0, 'median_filter_width': 7, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float16', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': True, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 448, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': [220, 50257], 'architectures': ['WhisperForConditionalGeneration'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 50257, 'pad_token_id': 50256, 'eos_token_id': 50257, 'sep_token_id': None, 'decoder_start_token_id': 50258, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'distil-whisper/distil-large-v3', 'transformers_version': '4.40.0.dev0', 'model_type': 'whisper', 'forced_decoder_ids': None, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': True, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 32, 'per_device_eval_batch_size': 32, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 0.0001, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 3.0, 'max_steps': 5000, 'lr_scheduler_type': 'linear', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.0, 'warmup_steps': 500, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': './runs/Mar27_13-10-05_hf-dgx-01', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 25, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 1000, 'save_total_limit': None, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 
'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': True, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': 1000, 'dataloader_num_workers': 4, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': './', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'adamw_torch', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'input_length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': False, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None, 'sortish_sampler': False, 'predict_with_generate': True, 'generation_max_length': 225, 'generation_num_beams': None, 'generation_config': None}
+ 2024-03-27 13:39:14,603 WARNING MsgRouterThr:1400507 [router.py:message_loop():77] message_loop has been closed
wandb/run-20240327_131020-92ximxsk/run-92ximxsk.wandb ADDED
Binary file (548 kB).
 
wandb/run-20240327_141033-golaq7b9/files/config.yaml ADDED
@@ -0,0 +1,751 @@
1
+ wandb_version: 1
2
+
3
+ _wandb:
4
+ desc: null
5
+ value:
6
+ python_version: 3.8.10
7
+ cli_version: 0.16.2
8
+ framework: huggingface
9
+ huggingface_version: 4.40.0.dev0
10
+ is_jupyter_run: false
11
+ is_kaggle_kernel: false
12
+ start_time: 1711545033.213735
13
+ t:
14
+ 1:
15
+ - 1
16
+ - 2
17
+ - 3
18
+ - 5
19
+ - 11
20
+ - 12
21
+ - 49
22
+ - 51
23
+ - 53
24
+ - 55
25
+ - 71
26
+ - 98
27
+ - 100
28
+ 2:
29
+ - 1
30
+ - 2
31
+ - 3
32
+ - 5
33
+ - 11
34
+ - 12
35
+ - 49
36
+ - 51
37
+ - 53
38
+ - 55
39
+ - 71
40
+ - 98
41
+ - 100
42
+ 3:
43
+ - 7
44
+ - 23
45
+ 4: 3.8.10
46
+ 5: 0.16.2
47
+ 6: 4.40.0.dev0
48
+ 8:
49
+ - 5
50
+ 9:
51
+ 1: transformers_trainer
52
+ 13: linux-x86_64
53
+ m:
54
+ - 1: train/global_step
55
+ 6:
56
+ - 3
57
+ - 1: train/loss
58
+ 5: 1
59
+ 6:
60
+ - 1
61
+ - 1: train/grad_norm
62
+ 5: 1
63
+ 6:
64
+ - 1
65
+ - 1: train/learning_rate
66
+ 5: 1
67
+ 6:
68
+ - 1
69
+ - 1: train/epoch
70
+ 5: 1
71
+ 6:
72
+ - 1
73
+ - 1: eval/loss
74
+ 5: 1
75
+ 6:
76
+ - 1
77
+ - 1: eval/wer
78
+ 5: 1
79
+ 6:
80
+ - 1
81
+ - 1: eval/runtime
82
+ 5: 1
83
+ 6:
84
+ - 1
85
+ - 1: eval/samples_per_second
86
+ 5: 1
87
+ 6:
88
+ - 1
89
+ - 1: eval/steps_per_second
90
+ 5: 1
91
+ 6:
92
+ - 1
93
+ vocab_size:
94
+ desc: null
95
+ value: 51866
96
+ num_mel_bins:
97
+ desc: null
98
+ value: 128
99
+ d_model:
100
+ desc: null
101
+ value: 1280
102
+ encoder_layers:
103
+ desc: null
104
+ value: 32
105
+ encoder_attention_heads:
106
+ desc: null
107
+ value: 20
108
+ decoder_layers:
109
+ desc: null
110
+ value: 2
111
+ decoder_attention_heads:
112
+ desc: null
113
+ value: 20
114
+ decoder_ffn_dim:
115
+ desc: null
116
+ value: 5120
117
+ encoder_ffn_dim:
118
+ desc: null
119
+ value: 5120
120
+ dropout:
121
+ desc: null
122
+ value: 0.0
123
+ attention_dropout:
124
+ desc: null
125
+ value: 0.0
126
+ activation_dropout:
127
+ desc: null
128
+ value: 0.0
129
+ activation_function:
130
+ desc: null
131
+ value: gelu
132
+ init_std:
133
+ desc: null
134
+ value: 0.02
135
+ encoder_layerdrop:
136
+ desc: null
137
+ value: 0.0
138
+ decoder_layerdrop:
139
+ desc: null
140
+ value: 0.0
141
+ use_cache:
142
+ desc: null
143
+ value: true
144
+ num_hidden_layers:
145
+ desc: null
146
+ value: 32
147
+ scale_embedding:
148
+ desc: null
149
+ value: false
150
+ max_source_positions:
151
+ desc: null
152
+ value: 1500
153
+ max_target_positions:
154
+ desc: null
155
+ value: 448
156
+ classifier_proj_size:
157
+ desc: null
158
+ value: 256
159
+ use_weighted_layer_sum:
160
+ desc: null
161
+ value: false
162
+ apply_spec_augment:
163
+ desc: null
164
+ value: false
165
+ mask_time_prob:
166
+ desc: null
167
+ value: 0.05
168
+ mask_time_length:
169
+ desc: null
170
+ value: 10
171
+ mask_time_min_masks:
172
+ desc: null
173
+ value: 2
174
+ mask_feature_prob:
175
+ desc: null
176
+ value: 0.0
177
+ mask_feature_length:
178
+ desc: null
179
+ value: 10
180
+ mask_feature_min_masks:
181
+ desc: null
182
+ value: 0
183
+ median_filter_width:
184
+ desc: null
185
+ value: 7
186
+ return_dict:
187
+ desc: null
188
+ value: true
189
+ output_hidden_states:
190
+ desc: null
191
+ value: false
192
+ output_attentions:
193
+ desc: null
194
+ value: false
195
+ torchscript:
196
+ desc: null
197
+ value: false
198
+ torch_dtype:
199
+ desc: null
200
+ value: float16
201
+ use_bfloat16:
202
+ desc: null
203
+ value: false
204
+ tf_legacy_loss:
205
+ desc: null
206
+ value: false
207
+ pruned_heads:
208
+ desc: null
209
+ value: {}
210
+ tie_word_embeddings:
211
+ desc: null
212
+ value: true
213
+ chunk_size_feed_forward:
214
+ desc: null
215
+ value: 0
216
+ is_encoder_decoder:
217
+ desc: null
218
+ value: true
219
+ is_decoder:
220
+ desc: null
221
+ value: false
222
+ cross_attention_hidden_size:
223
+ desc: null
224
+ value: null
225
+ add_cross_attention:
226
+ desc: null
227
+ value: false
228
+ tie_encoder_decoder:
229
+ desc: null
230
+ value: false
231
+ max_length:
232
+ desc: null
233
+ value: 448
234
+ min_length:
235
+ desc: null
236
+ value: 0
237
+ do_sample:
238
+ desc: null
239
+ value: false
240
+ early_stopping:
241
+ desc: null
242
+ value: false
243
+ num_beams:
244
+ desc: null
245
+ value: 1
246
+ num_beam_groups:
247
+ desc: null
248
+ value: 1
249
+ diversity_penalty:
250
+ desc: null
251
+ value: 0.0
252
+ temperature:
253
+ desc: null
254
+ value: 1.0
255
+ top_k:
256
+ desc: null
257
+ value: 50
258
+ top_p:
259
+ desc: null
260
+ value: 1.0
261
+ typical_p:
262
+ desc: null
263
+ value: 1.0
264
+ repetition_penalty:
265
+ desc: null
266
+ value: 1.0
267
+ length_penalty:
268
+ desc: null
269
+ value: 1.0
270
+ no_repeat_ngram_size:
271
+ desc: null
272
+ value: 0
273
+ encoder_no_repeat_ngram_size:
274
+ desc: null
275
+ value: 0
276
+ bad_words_ids:
277
+ desc: null
278
+ value: null
279
+ num_return_sequences:
280
+ desc: null
281
+ value: 1
282
+ output_scores:
283
+ desc: null
284
+ value: false
285
+ return_dict_in_generate:
286
+ desc: null
287
+ value: false
288
+ forced_bos_token_id:
289
+ desc: null
290
+ value: null
291
+ forced_eos_token_id:
292
+ desc: null
293
+ value: null
294
+ remove_invalid_values:
295
+ desc: null
296
+ value: false
297
+ exponential_decay_length_penalty:
298
+ desc: null
299
+ value: null
300
+ suppress_tokens:
301
+ desc: null
302
+ value: null
303
+ begin_suppress_tokens:
304
+ desc: null
305
+ value:
306
+ - 220
307
+ - 50257
308
+ architectures:
309
+ desc: null
310
+ value:
311
+ - WhisperForConditionalGeneration
312
+ finetuning_task:
313
+ desc: null
314
+ value: null
315
+ id2label:
316
+ desc: null
317
+ value:
318
+ '0': LABEL_0
319
+ '1': LABEL_1
320
+ label2id:
321
+ desc: null
322
+ value:
323
+ LABEL_0: 0
324
+ LABEL_1: 1
325
+ tokenizer_class:
326
+ desc: null
327
+ value: null
328
+ prefix:
329
+ desc: null
330
+ value: null
331
+ bos_token_id:
332
+ desc: null
333
+ value: 50257
334
+ pad_token_id:
335
+ desc: null
336
+ value: 50256
337
+ eos_token_id:
338
+ desc: null
339
+ value: 50257
340
+ sep_token_id:
341
+ desc: null
342
+ value: null
343
+ decoder_start_token_id:
344
+ desc: null
345
+ value: 50258
346
+ task_specific_params:
347
+ desc: null
348
+ value: null
349
+ problem_type:
350
+ desc: null
351
+ value: null
352
+ _name_or_path:
353
+ desc: null
354
+ value: distil-whisper/distil-large-v3
355
+ transformers_version:
356
+ desc: null
357
+ value: 4.40.0.dev0
358
+ model_type:
359
+ desc: null
360
+ value: whisper
361
+ forced_decoder_ids:
362
+ desc: null
363
+ value: null
364
+ output_dir:
365
+ desc: null
366
+ value: ./
367
+ overwrite_output_dir:
368
+ desc: null
369
+ value: true
370
+ do_train:
371
+ desc: null
372
+ value: true
373
+ do_eval:
374
+ desc: null
375
+ value: true
376
+ do_predict:
377
+ desc: null
378
+ value: false
379
+ evaluation_strategy:
380
+ desc: null
381
+ value: steps
382
+ prediction_loss_only:
383
+ desc: null
384
+ value: false
385
+ per_device_train_batch_size:
386
+ desc: null
387
+ value: 32
388
+ per_device_eval_batch_size:
389
+ desc: null
390
+ value: 32
391
+ per_gpu_train_batch_size:
392
+ desc: null
393
+ value: null
394
+ per_gpu_eval_batch_size:
395
+ desc: null
396
+ value: null
397
+ gradient_accumulation_steps:
398
+ desc: null
399
+ value: 1
400
+ eval_accumulation_steps:
401
+ desc: null
402
+ value: null
403
+ eval_delay:
404
+ desc: null
405
+ value: 0
406
+ learning_rate:
407
+ desc: null
408
+ value: 0.0001
409
+ weight_decay:
410
+ desc: null
411
+ value: 0.0
412
+ adam_beta1:
413
+ desc: null
414
+ value: 0.9
415
+ adam_beta2:
416
+ desc: null
417
+ value: 0.999
418
+ adam_epsilon:
419
+ desc: null
420
+ value: 1.0e-08
421
+ max_grad_norm:
422
+ desc: null
423
+ value: 1.0
424
+ num_train_epochs:
425
+ desc: null
426
+ value: 3.0
427
+ max_steps:
428
+ desc: null
429
+ value: 5000
430
+ lr_scheduler_type:
431
+ desc: null
432
+   value: linear
+ lr_scheduler_kwargs:
+   desc: null
+   value: {}
+ warmup_ratio:
+   desc: null
+   value: 0.0
+ warmup_steps:
+   desc: null
+   value: 500
+ log_level:
+   desc: null
+   value: passive
+ log_level_replica:
+   desc: null
+   value: warning
+ log_on_each_node:
+   desc: null
+   value: true
+ logging_dir:
+   desc: null
+   value: ./runs/Mar27_14-10-22_hf-dgx-01
+ logging_strategy:
+   desc: null
+   value: steps
+ logging_first_step:
+   desc: null
+   value: false
+ logging_steps:
+   desc: null
+   value: 25
+ logging_nan_inf_filter:
+   desc: null
+   value: true
+ save_strategy:
+   desc: null
+   value: steps
+ save_steps:
+   desc: null
+   value: 1000
+ save_total_limit:
+   desc: null
+   value: null
+ save_safetensors:
+   desc: null
+   value: true
+ save_on_each_node:
+   desc: null
+   value: false
+ save_only_model:
+   desc: null
+   value: false
+ no_cuda:
+   desc: null
+   value: false
+ use_cpu:
+   desc: null
+   value: false
+ use_mps_device:
+   desc: null
+   value: false
+ seed:
+   desc: null
+   value: 42
+ data_seed:
+   desc: null
+   value: null
+ jit_mode_eval:
+   desc: null
+   value: false
+ use_ipex:
+   desc: null
+   value: false
+ bf16:
+   desc: null
+   value: false
+ fp16:
+   desc: null
+   value: true
+ fp16_opt_level:
+   desc: null
+   value: O1
+ half_precision_backend:
+   desc: null
+   value: auto
+ bf16_full_eval:
+   desc: null
+   value: false
+ fp16_full_eval:
+   desc: null
+   value: false
+ tf32:
+   desc: null
+   value: null
+ local_rank:
+   desc: null
+   value: 0
+ ddp_backend:
+   desc: null
+   value: null
+ tpu_num_cores:
+   desc: null
+   value: null
+ tpu_metrics_debug:
+   desc: null
+   value: false
+ debug:
+   desc: null
+   value: []
+ dataloader_drop_last:
+   desc: null
+   value: false
+ eval_steps:
+   desc: null
+   value: 1000
+ dataloader_num_workers:
+   desc: null
+   value: 4
+ dataloader_prefetch_factor:
+   desc: null
+   value: null
+ past_index:
+   desc: null
+   value: -1
+ run_name:
+   desc: null
+   value: ./
+ disable_tqdm:
+   desc: null
+   value: false
+ remove_unused_columns:
+   desc: null
+   value: true
+ label_names:
+   desc: null
+   value: null
+ load_best_model_at_end:
+   desc: null
+   value: false
+ metric_for_best_model:
+   desc: null
+   value: null
+ greater_is_better:
+   desc: null
+   value: null
+ ignore_data_skip:
+   desc: null
+   value: false
+ fsdp:
+   desc: null
+   value: []
+ fsdp_min_num_params:
+   desc: null
+   value: 0
+ fsdp_config:
+   desc: null
+   value:
+     min_num_params: 0
+     xla: false
+     xla_fsdp_v2: false
+     xla_fsdp_grad_ckpt: false
+ fsdp_transformer_layer_cls_to_wrap:
+   desc: null
+   value: null
+ accelerator_config:
+   desc: null
+   value:
+     split_batches: false
+     dispatch_batches: null
+     even_batches: true
+     use_seedable_sampler: true
+ deepspeed:
+   desc: null
+   value: null
+ label_smoothing_factor:
+   desc: null
+   value: 0.0
+ optim:
+   desc: null
+   value: adamw_torch
+ optim_args:
+   desc: null
+   value: null
+ adafactor:
+   desc: null
+   value: false
+ group_by_length:
+   desc: null
+   value: false
+ length_column_name:
+   desc: null
+   value: input_length
+ report_to:
+   desc: null
+   value:
+   - tensorboard
+   - wandb
+ ddp_find_unused_parameters:
+   desc: null
+   value: null
+ ddp_bucket_cap_mb:
+   desc: null
+   value: null
+ ddp_broadcast_buffers:
+   desc: null
+   value: null
+ dataloader_pin_memory:
+   desc: null
+   value: true
+ dataloader_persistent_workers:
+   desc: null
+   value: false
+ skip_memory_metrics:
+   desc: null
+   value: true
+ use_legacy_prediction_loop:
+   desc: null
+   value: false
+ push_to_hub:
+   desc: null
+   value: true
+ resume_from_checkpoint:
+   desc: null
+   value: null
+ hub_model_id:
+   desc: null
+   value: null
+ hub_strategy:
+   desc: null
+   value: every_save
+ hub_token:
+   desc: null
+   value: <HUB_TOKEN>
+ hub_private_repo:
+   desc: null
+   value: false
+ hub_always_push:
+   desc: null
+   value: false
+ gradient_checkpointing:
+   desc: null
+   value: true
+ gradient_checkpointing_kwargs:
+   desc: null
+   value: null
+ include_inputs_for_metrics:
+   desc: null
+   value: false
+ fp16_backend:
+   desc: null
+   value: auto
+ push_to_hub_model_id:
+   desc: null
+   value: null
+ push_to_hub_organization:
+   desc: null
+   value: null
+ push_to_hub_token:
+   desc: null
+   value: <PUSH_TO_HUB_TOKEN>
+ mp_parameters:
+   desc: null
+   value: ''
+ auto_find_batch_size:
+   desc: null
+   value: false
+ full_determinism:
+   desc: null
+   value: false
+ torchdynamo:
+   desc: null
+   value: null
+ ray_scope:
+   desc: null
+   value: last
+ ddp_timeout:
+   desc: null
+   value: 1800
+ torch_compile:
+   desc: null
+   value: false
+ torch_compile_backend:
+   desc: null
+   value: null
+ torch_compile_mode:
+   desc: null
+   value: null
+ dispatch_batches:
+   desc: null
+   value: null
+ split_batches:
+   desc: null
+   value: null
+ include_tokens_per_second:
+   desc: null
+   value: false
+ include_num_input_tokens_seen:
+   desc: null
+   value: false
+ neftune_noise_alpha:
+   desc: null
+   value: null
+ optim_target_modules:
+   desc: null
+   value: null
+ sortish_sampler:
+   desc: null
+   value: false
+ predict_with_generate:
+   desc: null
+   value: true
+ generation_max_length:
+   desc: null
+   value: 225
+ generation_num_beams:
+   desc: null
+   value: null
+ generation_config:
+   desc: null
+   value: null
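The YAML above is wandb's dump of the run's Seq2SeqTrainingArguments. For orientation, here is a minimal sketch of how the logged values map onto the transformers API; this is illustrative only, shows a subset of the recorded hyperparameters, and is not the actual training script:

    from transformers import Seq2SeqTrainingArguments

    # Subset of the hyperparameters recorded in the wandb config above
    # (max_steps, learning_rate and batch size come from the run args
    # captured in wandb-metadata.json further down).
    training_args = Seq2SeqTrainingArguments(
        output_dir="./",
        max_steps=5000,
        per_device_train_batch_size=32,
        learning_rate=1e-4,
        lr_scheduler_type="linear",
        warmup_steps=500,
        fp16=True,
        gradient_checkpointing=True,
        evaluation_strategy="steps",
        eval_steps=1000,
        save_strategy="steps",
        save_steps=1000,
        logging_steps=25,
        dataloader_num_workers=4,
        length_column_name="input_length",
        predict_with_generate=True,
        generation_max_length=225,
        report_to=["tensorboard", "wandb"],
        push_to_hub=True,
    )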
wandb/run-20240327_141033-golaq7b9/files/output.log ADDED
@@ -0,0 +1,1036 @@
+ 0%| | 0/5000 [00:00<?, ?it/s]/home/sanchit/hf/lib/python3.8/site-packages/torch/utils/checkpoint.py:460: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
+ warnings.warn(
+ /home/sanchit/hf/lib/python3.8/site-packages/torch/utils/checkpoint.py:90: UserWarning: None of the inputs have requires_grad=True. Gradients will be None
+ warnings.warn(
+ [WARNING|logging.py:329] 2024-03-27 14:10:46,850 >> `use_cache = True` is incompatible with gradient checkpointing. Setting `use_cache = False`...
+ 1%|▍ | 26/5000 [00:58<2:17:23, 1.66s/it]
+ 1%|▊ | 50/5000 [01:37<2:16:35, 1.66s/it]
+ 2%|█▏ | 75/5000 [02:19<2:16:00, 1.66s/it]
+ 2%|█▌ | 101/5000 [03:01<2:15:01, 1.65s/it]
+ 2%|█▉ | 125/5000 [03:41<2:14:23, 1.65s/it]
+ 3%|██▎ | 150/5000 [04:23<2:13:39, 1.65s/it]
+ 4%|██▋ | 176/5000 [05:06<2:13:03, 1.65s/it]
+ 4%|███ | 200/5000 [05:45<2:13:02, 1.66s/it]
+ 4%|███▌ | 225/5000 [06:38<4:24:16, 3.32s/it]
+ 5%|███▉ | 250/5000 [07:20<2:11:37, 1.66s/it]
+ 6%|████▎ | 275/5000 [08:01<2:10:40, 1.66s/it]
+ 6%|████▋ | 299/5000 [08:41<2:09:47, 1.66s/it]
+ 6%|█████ | 323/5000 [09:21<2:09:41, 1.66s/it]
+ 7%|█████▍ | 349/5000 [10:04<2:08:45, 1.66s/it]
+ 7%|█████▊ | 374/5000 [10:46<2:08:11, 1.66s/it]
+ 8%|██████▏ | 399/5000 [11:27<2:07:23, 1.66s/it]
+ 8%|██████▌ | 424/5000 [12:09<2:06:52, 1.66s/it]
+ 9%|███████ | 449/5000 [13:02<3:14:08, 2.56s/it]
+ 9%|███████▍ | 474/5000 [13:43<2:04:46, 1.65s/it]
+ 10%|███████▊ | 499/5000 [14:24<2:03:36, 1.65s/it]
+ 10%|████████▏ | 524/5000 [15:06<2:04:08, 1.66s/it]
+ 11%|████████▌ | 548/5000 [15:45<2:03:01, 1.66s/it]
+ 11%|████████▉ | 574/5000 [16:29<2:02:39, 1.66s/it]
+ 12%|█████████▎ | 599/5000 [17:10<2:01:42, 1.66s/it]
+ 12%|█████████▋ | 623/5000 [17:50<2:01:22, 1.66s/it]
+ 13%|██████████ | 649/5000 [18:33<2:00:14, 1.66s/it]
+ 14%|██████████▌ | 675/5000 [19:28<2:16:11, 1.89s/it]
+ 14%|██████████▉ | 700/5000 [20:10<2:00:00, 1.67s/it]
+ 14%|███████████▎ | 725/5000 [20:54<1:58:55, 1.67s/it]
+ 15%|███████████▋ | 750/5000 [21:36<1:57:57, 1.67s/it]
+ 16%|████████████ | 775/5000 [22:16<2:19:43, 1.98s/it]
+ 16%|████████████▍ | 800/5000 [22:59<2:02:55, 1.76s/it]
+ 16%|████████████▊ | 825/5000 [23:43<1:55:52, 1.67s/it]
+ 17%|█████████████▎ | 850/5000 [24:25<1:55:12, 1.67s/it]
+ 18%|█████████████▋ | 875/5000 [25:06<1:54:53, 1.67s/it]
+ 18%|██████████████ | 901/5000 [26:03<1:59:01, 1.74s/it]
+ 18%|██████████████▍ | 925/5000 [26:44<1:53:49, 1.68s/it]
+ 19%|██████████████▊ | 950/5000 [27:31<1:53:21, 1.68s/it]
+ 20%|███████████████▏ | 975/5000 [28:13<1:52:53, 1.68s/it]
+ 20%|███████████████▍ | 1000/5000 [28:55<1:51:52, 1.68s/it][INFO|trainer.py:768] 2024-03-27 14:39:29,112 >> The following columns in the evaluation set don't have a corresponding argument in `WhisperForConditionalGeneration.forward` and have been ignored: input_length. If input_length are not expected by `WhisperForConditionalGeneration.forward`, you can safely ignore this message.
+ [INFO|trainer.py:3515] 2024-03-27 14:39:29,115 >> ***** Running Evaluation *****
+ [INFO|trainer.py:3517] 2024-03-27 14:39:29,115 >> Num examples = 3123
+ [INFO|trainer.py:3520] 2024-03-27 14:39:29,115 >> Batch size = 32
+ {'loss': 0.1015, 'grad_norm': 1.3333516120910645, 'learning_rate': 8.900000000000001e-05, 'epoch': 4.5}
+ [INFO|generation_whisper.py:1111] 2024-03-27 14:39:48,640 >> You have passed task=transcribe, but also have set `forced_decoder_ids` to [[1, None], [2, 50360]] which creates a conflict. `forced_decoder_ids` will be ignored in favor of task=transcribe.
+ 0%| | 0/98 [00:00<?, ?it/s]
+ [... evaluation proceeds from 2/98 to 97/98 at roughly 13.2 s/it; the same forced_decoder_ids INFO message is logged once per batch and is elided here ...]
+ 99%|██████████████████████████████████████████████████████████████████████████████████▏| 97/98 [21:00<00:13, 13.30s/it]
+ [WARNING|configuration_utils.py:447] 2024-03-27 15:01:11,782 >> Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.
+ Non-default generation parameters: {'max_length': 448, 'begin_suppress_tokens': [220, 50257]}
+ [INFO|configuration_utils.py:471] 2024-03-27 15:01:11,783 >> Configuration saved in ./checkpoint-1000/config.json
+ [INFO|configuration_utils.py:697] 2024-03-27 15:01:11,784 >> Configuration saved in ./checkpoint-1000/generation_config.json
+ {'eval_loss': 0.3065292239189148, 'eval_wer': 0.3243838368229931, 'eval_runtime': 1302.6648, 'eval_samples_per_second': 2.397, 'eval_steps_per_second': 0.075, 'epoch': 4.5}
+ [INFO|modeling_utils.py:2474] 2024-03-27 15:01:19,796 >> Model weights saved in ./checkpoint-1000/model.safetensors
+ [INFO|feature_extraction_utils.py:424] 2024-03-27 15:01:19,798 >> Feature extractor saved in ./checkpoint-1000/preprocessor_config.json
+ [INFO|feature_extraction_utils.py:424] 2024-03-27 15:01:29,394 >> Feature extractor saved in ./preprocessor_config.json
+ /home/sanchit/hf/lib/python3.8/site-packages/torch/utils/checkpoint.py:460: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
+ warnings.warn(
+ /home/sanchit/hf/lib/python3.8/site-packages/torch/utils/checkpoint.py:90: UserWarning: None of the inputs have requires_grad=True. Gradients will be None
+ warnings.warn(
+ 20%|███████████████▊ | 1024/5000 [51:35<1:58:29, 1.79s/it]
+ 21%|████████████████▏ | 1049/5000 [52:17<1:51:17, 1.69s/it]
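Two warnings recur throughout the log above: the torch.utils.checkpoint request to set use_reentrant explicitly, and the generation-time conflict between task=transcribe and the forced_decoder_ids baked into the model config. A hedged sketch of how both could be silenced in user code, assuming the transformers 4.39 / torch 2.2 APIs pinned in requirements.txt below (this is not necessarily what the training script does):

    from transformers import WhisperForConditionalGeneration

    model = WhisperForConditionalGeneration.from_pretrained(
        "distil-whisper/distil-large-v3"
    )

    # Pick a reentrant mode explicitly rather than relying on the default
    # that torch warns is about to change.
    model.gradient_checkpointing_enable(
        gradient_checkpointing_kwargs={"use_reentrant": False}
    )

    # Clear the legacy forced_decoder_ids so that passing task="transcribe"
    # at generation time no longer conflicts with the model config.
    model.generation_config.forced_decoder_ids = None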
wandb/run-20240327_141033-golaq7b9/files/requirements.txt ADDED
@@ -0,0 +1,247 @@
+ absl-py==2.1.0
+ accelerate==0.27.2
+ aiohttp==3.9.3
+ aiosignal==1.3.1
+ anyio==4.2.0
+ appdirs==1.4.4
+ argon2-cffi-bindings==21.2.0
+ argon2-cffi==23.1.0
+ arrow==1.3.0
+ asttokens==2.4.1
+ astunparse==1.6.3
+ async-lru==2.0.4
+ async-timeout==4.0.3
+ attrs==23.2.0
+ audioread==3.0.1
+ av==11.0.0
+ babel==2.14.0
+ backcall==0.2.0
+ beautifulsoup4==4.12.3
+ bitsandbytes==0.42.0
+ bleach==6.1.0
+ cached-property==1.5.2
+ cachetools==5.3.2
+ certifi==2024.2.2
+ cffi==1.16.0
+ charset-normalizer==3.3.2
+ chex==0.1.7
+ click==8.1.7
+ coloredlogs==15.0.1
+ comm==0.2.1
+ contourpy==1.1.1
+ ctranslate2==4.1.0
+ cycler==0.12.1
+ datasets==2.18.0
+ debugpy==1.8.0
+ decorator==5.1.1
+ defusedxml==0.7.1
+ dill==0.3.7
+ dm-tree==0.1.8
+ docker-pycreds==0.4.0
+ docstring-parser==0.15
+ einops==0.7.0
+ etils==1.3.0
+ evaluate==0.4.1
+ exceptiongroup==1.2.0
+ executing==2.0.1
+ faster-whisper==1.0.1
+ fastjsonschema==2.19.1
+ filelock==3.13.1
+ flash-attn==2.5.3
+ flatbuffers==23.5.26
+ flax==0.7.2
+ fonttools==4.48.1
+ fqdn==1.5.1
+ frozenlist==1.4.1
+ fsspec==2024.2.0
+ gast==0.4.0
+ gitdb==4.0.11
+ gitpython==3.1.41
+ google-auth-oauthlib==1.0.0
+ google-auth==2.27.0
+ google-pasta==0.2.0
+ grpcio==1.60.1
+ h11==0.14.0
+ h5py==3.10.0
+ httpcore==1.0.2
+ httpx==0.26.0
+ huggingface-hub==0.21.4
+ humanfriendly==10.0
+ idna==3.6
+ importlib-metadata==7.0.1
+ importlib-resources==6.1.1
+ iniconfig==2.0.0
+ ipdb==0.13.13
+ ipykernel==6.29.2
+ ipython==8.12.3
+ isoduration==20.11.0
+ jax==0.4.13
+ jaxlib==0.4.13
+ jedi==0.19.1
+ jinja2==3.1.2
+ jiwer==3.0.3
+ joblib==1.3.2
+ json5==0.9.14
+ jsonpointer==2.4
+ jsonschema-specifications==2023.12.1
+ jsonschema==4.21.1
+ jupyter-client==8.6.0
+ jupyter-core==5.7.1
+ jupyter-events==0.9.0
+ jupyter-lsp==2.2.2
+ jupyter-server-terminals==0.5.2
+ jupyter-server==2.12.5
+ jupyterlab-pygments==0.3.0
+ jupyterlab-server==2.25.2
+ jupyterlab==4.1.0
+ keras==2.13.1
+ kiwisolver==1.4.5
+ lazy-loader==0.3
+ libclang==16.0.6
+ librosa==0.10.1
+ llvmlite==0.41.1
+ markdown-it-py==3.0.0
+ markdown==3.5.2
+ markupsafe==2.1.3
+ matplotlib-inline==0.1.6
+ matplotlib==3.7.4
+ mdurl==0.1.2
+ mistune==3.0.2
+ ml-dtypes==0.2.0
+ more-itertools==10.2.0
+ mpmath==1.2.1
+ msclap==1.3.3
+ msgpack==1.0.7
+ multidict==6.0.5
+ multiprocess==0.70.15
+ nbclient==0.9.0
+ nbconvert==7.16.0
+ nbformat==5.9.2
+ nest-asyncio==1.6.0
+ networkx==3.0rc1
+ ninja==1.11.1.1
+ notebook-shim==0.2.3
+ numba==0.58.1
+ numpy==1.24.3
+ nvidia-cublas-cu12==12.1.3.1
+ nvidia-cuda-cupti-cu12==12.1.105
+ nvidia-cuda-nvrtc-cu12==12.1.105
+ nvidia-cuda-runtime-cu12==12.1.105
+ nvidia-cudnn-cu12==8.9.2.26
+ nvidia-cufft-cu12==11.0.2.54
+ nvidia-curand-cu12==10.3.2.106
+ nvidia-cusolver-cu12==11.4.5.107
+ nvidia-cusparse-cu12==12.1.0.106
+ nvidia-nccl-cu12==2.19.3
+ nvidia-nvjitlink-cu12==12.1.105
+ nvidia-nvtx-cu12==12.1.105
+ oauthlib==3.2.2
+ onnxruntime==1.17.1
+ openai-whisper==20231117
+ opt-einsum==3.3.0
+ optax==0.1.8
+ orbax-checkpoint==0.2.3
+ overrides==7.7.0
+ packaging==23.2
+ pandas==2.0.3
+ pandocfilters==1.5.1
+ parameterized==0.9.0
+ parso==0.8.3
+ peft==0.8.2
+ pexpect==4.9.0
+ pickleshare==0.7.5
+ pillow==9.3.0
+ pip==24.0
+ pkg-resources==0.0.0
+ pkgutil-resolve-name==1.3.10
+ platformdirs==4.2.0
+ pluggy==1.4.0
+ pooch==1.8.0
+ prometheus-client==0.19.0
+ prompt-toolkit==3.0.43
+ protobuf==4.25.2
+ psutil==5.9.8
+ ptyprocess==0.7.0
+ pure-eval==0.2.2
+ pyarrow-hotfix==0.6
+ pyarrow==15.0.0
+ pyasn1-modules==0.3.0
+ pyasn1==0.5.1
+ pycparser==2.21
+ pygments==2.17.2
+ pyparsing==3.1.1
+ pytest==7.4.4
+ python-dateutil==2.8.2
+ python-json-logger==2.0.7
+ pytorch-triton==3.0.0+901819d2b6
+ pytz==2024.1
+ pyyaml==6.0.1
+ pyzmq==25.1.2
+ rapidfuzz==3.6.1
+ referencing==0.33.0
+ regex==2023.12.25
+ requests-oauthlib==1.3.1
+ requests==2.31.0
+ responses==0.18.0
+ rfc3339-validator==0.1.4
+ rfc3986-validator==0.1.1
+ rich==13.7.0
+ rpds-py==0.17.1
+ rsa==4.9
+ safetensors==0.4.2
+ scikit-learn==1.3.2
+ scipy==1.10.1
+ send2trash==1.8.2
+ sentry-sdk==1.40.0
+ setproctitle==1.3.3
+ setuptools==44.0.0
+ shtab==1.7.0
+ six==1.16.0
+ smmap==5.0.1
+ sniffio==1.3.0
+ soundfile==0.12.1
+ soupsieve==2.5
+ soxr==0.3.7
+ stack-data==0.6.3
+ sympy==1.11.1
+ tensorboard-data-server==0.7.2
+ tensorboard==2.13.0
+ tensorflow-cpu==2.13.1
+ tensorflow-estimator==2.13.0
+ tensorflow-io-gcs-filesystem==0.34.0
+ tensorstore==0.1.45
+ termcolor==2.4.0
+ terminado==0.18.0
+ threadpoolctl==3.2.0
+ tiktoken==0.6.0
+ tinycss2==1.2.1
+ tokenizers==0.15.1
+ tomli==2.0.1
+ toolz==0.12.1
+ torch==2.2.1
+ torchaudio==2.2.1
+ torchlibrosa==0.1.0
+ torchvision==0.17.1
+ tornado==6.4
+ tqdm==4.66.1
+ traitlets==5.14.1
+ transformers==4.39.0.dev0
+ triton==2.2.0
+ trl==0.7.11
+ types-python-dateutil==2.8.19.20240106
+ typing-extensions==4.9.0
+ tyro==0.7.3
+ tzdata==2023.4
+ uri-template==1.3.0
+ urllib3==2.2.0
+ wandb==0.16.2
+ wcwidth==0.2.13
+ webcolors==1.13
+ webencodings==0.5.1
+ websocket-client==1.7.0
+ werkzeug==3.0.1
+ wheel==0.42.0
+ wrapt==1.16.0
+ xxhash==3.4.1
+ yarl==1.9.4
+ zipp==3.17.0
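The pins above define the exact environment captured by wandb for this run. As an illustrative sanity check for the packages that matter most to reproducing it (version strings may carry local suffixes such as +cu121, hence the prefix check for torch):

    import datasets
    import torch
    import transformers

    # Key pins from requirements.txt above.
    assert torch.__version__.startswith("2.2.1")
    assert datasets.__version__ == "2.18.0"
    print(transformers.__version__)  # expected: 4.39.0.dev0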
wandb/run-20240327_141033-golaq7b9/files/wandb-metadata.json ADDED
@@ -0,0 +1,738 @@
+ {
+ "os": "Linux-5.4.0-166-generic-x86_64-with-glibc2.29",
+ "python": "3.8.10",
+ "heartbeatAt": "2024-03-27T13:10:33.980515",
+ "startedAt": "2024-03-27T13:10:33.210069",
+ "docker": null,
+ "cuda": null,
+ "args": [
+ "--model_name_or_path=distil-whisper/distil-large-v3",
+ "--dataset_name=mozilla-foundation/common_voice_16_1",
+ "--dataset_config_name=hi",
+ "--language=hindi",
+ "--train_split_name=train+validation",
+ "--eval_split_name=test",
+ "--max_steps=5000",
+ "--output_dir=./",
+ "--per_device_train_batch_size=32",
+ "--per_device_eval_batch_size=32",
+ "--logging_steps=25",
+ "--learning_rate=1e-4",
+ "--warmup_steps=500",
+ "--evaluation_strategy=steps",
+ "--eval_steps=1000",
+ "--save_strategy=steps",
+ "--save_steps=1000",
+ "--generation_max_length=225",
+ "--preprocessing_num_workers=1",
+ "--dataloader_num_workers=4",
+ "--length_column_name=input_length",
+ "--max_duration_in_seconds=30",
+ "--text_column_name=sentence",
+ "--freeze_feature_encoder=False",
+ "--freeze_encoder",
+ "--gradient_checkpointing",
+ "--fp16",
+ "--overwrite_output_dir",
+ "--do_train",
+ "--do_eval",
+ "--predict_with_generate",
+ "--use_auth_token",
+ "--push_to_hub"
+ ],
+ "state": "running",
+ "program": "run_speech_recognition_seq2seq.py",
+ "codePathLocal": "run_speech_recognition_seq2seq.py",
+ "codePath": "run_speech_recognition_seq2seq.py",
+ "git": {
+ "remote": "https://huggingface.co/sanchit-gandhi/distil-large-v3-hi-ft-frozen-encoder",
+ "commit": "e7946df277d73ac75c34c2017b01c6d39e0275cd"
+ },
+ "email": "[email protected]",
+ "root": "/home/sanchit/distil-large-v3-hi-ft-frozen-encoder",
+ "host": "hf-dgx-01",
+ "username": "sanchit",
+ "executable": "/home/sanchit/hf/bin/python",
+ "cpu_count": 64,
+ "cpu_count_logical": 128,
+ "cpu_freq": {
+ "current": 2202.863015625,
+ "min": 1500.0,
+ "max": 2250.0
+ },
+ "cpu_freq_per_core": [
+ { "current": 3171.209, "min": 1500.0, "max": 2250.0 },
+ { "current": 2364.325, "min": 1500.0, "max": 2250.0 },
+ { "current": 1572.649, "min": 1500.0, "max": 2250.0 },
+ { "current": 1659.311, "min": 1500.0, "max": 2250.0 },
+ { "current": 1886.877, "min": 1500.0, "max": 2250.0 },
+ { "current": 1977.114, "min": 1500.0, "max": 2250.0 },
+ { "current": 2391.085, "min": 1500.0, "max": 2250.0 },
+ { "current": 1715.56, "min": 1500.0, "max": 2250.0 },
+ { "current": 1660.921, "min": 1500.0, "max": 2250.0 },
+ { "current": 3284.409, "min": 1500.0, "max": 2250.0 },
+ { "current": 1662.262, "min": 1500.0, "max": 2250.0 },
+ { "current": 1664.285, "min": 1500.0, "max": 2250.0 },
+ { "current": 2370.48, "min": 1500.0, "max": 2250.0 },
+ { "current": 1659.366, "min": 1500.0, "max": 2250.0 },
+ { "current": 3291.736, "min": 1500.0, "max": 2250.0 },
+ { "current": 1664.415, "min": 1500.0, "max": 2250.0 },
+ { "current": 2283.506, "min": 1500.0, "max": 2250.0 },
+ { "current": 1713.973, "min": 1500.0, "max": 2250.0 },
+ { "current": 1714.618, "min": 1500.0, "max": 2250.0 },
+ { "current": 1714.445, "min": 1500.0, "max": 2250.0 },
+ { "current": 3316.361, "min": 1500.0, "max": 2250.0 },
+ { "current": 1645.973, "min": 1500.0, "max": 2250.0 },
+ { "current": 1659.14, "min": 1500.0, "max": 2250.0 },
+ { "current": 1660.739, "min": 1500.0, "max": 2250.0 },
+ { "current": 3273.321, "min": 1500.0, "max": 2250.0 },
+ { "current": 1658.755, "min": 1500.0, "max": 2250.0 },
+ { "current": 1663.103, "min": 1500.0, "max": 2250.0 },
+ { "current": 1661.275, "min": 1500.0, "max": 2250.0 },
+ { "current": 3194.935, "min": 1500.0, "max": 2250.0 },
+ { "current": 1730.989, "min": 1500.0, "max": 2250.0 },
+ { "current": 1727.119, "min": 1500.0, "max": 2250.0 },
+ { "current": 1697.351, "min": 1500.0, "max": 2250.0 },
+ { "current": 3275.205, "min": 1500.0, "max": 2250.0 },
+ { "current": 1665.717, "min": 1500.0, "max": 2250.0 },
+ { "current": 1663.68, "min": 1500.0, "max": 2250.0 },
+ { "current": 1669.771, "min": 1500.0, "max": 2250.0 },
+ { "current": 3334.278, "min": 1500.0, "max": 2250.0 },
+ { "current": 1635.286, "min": 1500.0, "max": 2250.0 },
+ { "current": 1664.293, "min": 1500.0, "max": 2250.0 },
+ { "current": 1664.902, "min": 1500.0, "max": 2250.0 },
+ { "current": 1795.097, "min": 1500.0, "max": 2250.0 },
+ { "current": 1795.841, "min": 1500.0, "max": 2250.0 },
+ { "current": 1791.869, "min": 1500.0, "max": 2250.0 },
+ { "current": 1794.608, "min": 1500.0, "max": 2250.0 },
+ { "current": 1794.143, "min": 1500.0, "max": 2250.0 },
+ { "current": 1793.214, "min": 1500.0, "max": 2250.0 },
+ { "current": 1795.704, "min": 1500.0, "max": 2250.0 },
+ { "current": 1792.981, "min": 1500.0, "max": 2250.0 },
+ { "current": 3272.364, "min": 1500.0, "max": 2250.0 },
+ { "current": 1663.677, "min": 1500.0, "max": 2250.0 },
+ { "current": 1654.916, "min": 1500.0, "max": 2250.0 },
+ { "current": 1657.783, "min": 1500.0, "max": 2250.0 },
+ { "current": 1791.15, "min": 1500.0, "max": 2250.0 },
+ { "current": 1796.079, "min": 1500.0, "max": 2250.0 },
+ { "current": 1794.617, "min": 1500.0, "max": 2250.0 },
+ { "current": 1794.699, "min": 1500.0, "max": 2250.0 },
+ { "current": 1656.917, "min": 1500.0, "max": 2250.0 },
+ { "current": 3275.536, "min": 1500.0, "max": 2250.0 },
+ { "current": 1655.448, "min": 1500.0, "max": 2250.0 },
+ { "current": 1656.093, "min": 1500.0, "max": 2250.0 },
+ { "current": 1656.343, "min": 1500.0, "max": 2250.0 },
+ { "current": 1657.244, "min": 1500.0, "max": 2250.0 },
+ { "current": 1844.721, "min": 1500.0, "max": 2250.0 },
+ { "current": 3312.551, "min": 1500.0, "max": 2250.0 },
+ { "current": 3298.468, "min": 1500.0, "max": 2250.0 },
+ { "current": 2342.129, "min": 1500.0, "max": 2250.0 },
+ { "current": 2427.115, "min": 1500.0, "max": 2250.0 },
+ { "current": 2390.813, "min": 1500.0, "max": 2250.0 },
+ { "current": 2441.533, "min": 1500.0, "max": 2250.0 },
+ { "current": 2391.094, "min": 1500.0, "max": 2250.0 },
+ { "current": 2459.387, "min": 1500.0, "max": 2250.0 },
+ { "current": 2458.036, "min": 1500.0, "max": 2250.0 },
+ { "current": 2306.435, "min": 1500.0, "max": 2250.0 },
+ { "current": 3298.811, "min": 1500.0, "max": 2250.0 },
+ { "current": 2370.203, "min": 1500.0, "max": 2250.0 },
+ { "current": 2387.939, "min": 1500.0, "max": 2250.0 },
+ { "current": 1744.029, "min": 1500.0, "max": 2250.0 },
+ { "current": 2438.926, "min": 1500.0, "max": 2250.0 },
+ { "current": 3106.564, "min": 1500.0, "max": 2250.0 },
+ { "current": 2415.235, "min": 1500.0, "max": 2250.0 },
+ { "current": 3301.717, "min": 1500.0, "max": 2250.0 },
+ { "current": 2364.152, "min": 1500.0, "max": 2250.0 },
+ { "current": 2352.175, "min": 1500.0, "max": 2250.0 },
+ { "current": 1650.287, "min": 1500.0, "max": 2250.0 },
+ { "current": 3311.0, "min": 1500.0, "max": 2250.0 },
+ { "current": 3305.609, "min": 1500.0, "max": 2250.0 },
+ { "current": 1651.466, "min": 1500.0, "max": 2250.0 },
+ { "current": 1653.023, "min": 1500.0, "max": 2250.0 },
+ { "current": 3276.845, "min": 1500.0, "max": 2250.0 },
+ { "current": 1653.413, "min": 1500.0, "max": 2250.0 },
+ { "current": 3302.647, "min": 1500.0, "max": 2250.0 },
+ { "current": 1653.312, "min": 1500.0, "max": 2250.0 },
+ { "current": 3303.875, "min": 1500.0, "max": 2250.0 },
+ { "current": 1620.896, "min": 1500.0, "max": 2250.0 },
+ { "current": 1652.028, "min": 1500.0, "max": 2250.0 },
+ { "current": 1653.199, "min": 1500.0, "max": 2250.0 },
+ { "current": 3299.115, "min": 1500.0, "max": 2250.0 },
+ { "current": 1651.729, "min": 1500.0, "max": 2250.0 },
+ { "current": 1651.899, "min": 1500.0, "max": 2250.0 },
+ { "current": 2278.731, "min": 1500.0, "max": 2250.0 },
+ { "current": 3291.881, "min": 1500.0, "max": 2250.0 },
+ { "current": 2372.068, "min": 1500.0, "max": 2250.0 },
+ { "current": 1855.559, "min": 1500.0, "max": 2250.0 },
+ { "current": 1654.361, "min": 1500.0, "max": 2250.0 },
+ { "current": 1968.15, "min": 1500.0, "max": 2250.0 },
+ { "current": 1670.484, "min": 1500.0, "max": 2250.0 },
+ { "current": 1715.246, "min": 1500.0, "max": 2250.0 },
+ { "current": 2443.499, "min": 1500.0, "max": 2250.0 },
+ { "current": 2139.871, "min": 1500.0, "max": 2250.0 },
+ { "current": 2161.111, "min": 1500.0, "max": 2250.0 },
+ { "current": 1718.071, "min": 1500.0, "max": 2250.0 },
+ { "current": 2151.675, "min": 1500.0, "max": 2250.0 },
+ { "current": 2978.053, "min": 1500.0, "max": 2250.0 },
+ { "current": 1655.523, "min": 1500.0, "max": 2250.0 },
+ { "current": 1657.328, "min": 1500.0, "max": 2250.0 },
+ { "current": 1657.12, "min": 1500.0, "max": 2250.0 },
+ { "current": 2168.709, "min": 1500.0, "max": 2250.0 },
+ { "current": 2050.918, "min": 1500.0, "max": 2250.0 },
+ { "current": 2050.632, "min": 1500.0, "max": 2250.0 },
+ { "current": 1656.328, "min": 1500.0, "max": 2250.0 },
+ { "current": 1670.254, "min": 1500.0, "max": 2250.0 },
+ { "current": 3291.107, "min": 1500.0, "max": 2250.0 },
+ { "current": 1660.234, "min": 1500.0, "max": 2250.0 },
+ { "current": 1654.604, "min": 1500.0, "max": 2250.0 },
+ { "current": 1657.069, "min": 1500.0, "max": 2250.0 },
+ { "current": 1659.425, "min": 1500.0, "max": 2250.0 },
+ { "current": 1850.263, "min": 1500.0, "max": 2250.0 },
+ { "current": 3277.475, "min": 1500.0, "max": 2250.0 }
+ ],
+ "disk": {
+ "/": {
+ "total": 1757.8785285949707,
+ "used": 1516.8665618896484
+ }
+ },
+ "gpu": "NVIDIA A100-SXM4-80GB",
+ "gpu_count": 5,
+ "gpu_devices": [
+ { "name": "NVIDIA A100-SXM4-80GB", "memory_total": 85899345920 },
+ { "name": "NVIDIA A100-SXM4-80GB", "memory_total": 85899345920 },
+ { "name": "NVIDIA A100-SXM4-80GB", "memory_total": 85899345920 },
+ { "name": "NVIDIA DGX Display", "memory_total": 4294967296 },
+ { "name": "NVIDIA A100-SXM4-80GB", "memory_total": 85899345920 }
+ ],
+ "memory": {
+ "total": 503.5396919250488
+ }
+ }
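
The "program" and "args" fields above fully determine how this run was launched. Below is a minimal sketch that replays the recorded argv from Python; every flag is copied verbatim from the metadata, while the use of subprocess (rather than the repository's own run.sh) is an assumption for illustration only.

import subprocess

# Replay the training command recorded in wandb-metadata.json above.
# All flags are taken verbatim from the "args" field of this run.
cmd = [
    "python", "run_speech_recognition_seq2seq.py",
    "--model_name_or_path=distil-whisper/distil-large-v3",
    "--dataset_name=mozilla-foundation/common_voice_16_1",
    "--dataset_config_name=hi",
    "--language=hindi",
    "--train_split_name=train+validation",
    "--eval_split_name=test",
    "--max_steps=5000",
    "--output_dir=./",
    "--per_device_train_batch_size=32",
    "--per_device_eval_batch_size=32",
    "--logging_steps=25",
    "--learning_rate=1e-4",
    "--warmup_steps=500",
    "--evaluation_strategy=steps",
    "--eval_steps=1000",
    "--save_strategy=steps",
    "--save_steps=1000",
    "--generation_max_length=225",
    "--preprocessing_num_workers=1",
    "--dataloader_num_workers=4",
    "--length_column_name=input_length",
    "--max_duration_in_seconds=30",
    "--text_column_name=sentence",
    "--freeze_feature_encoder=False",
    "--freeze_encoder",
    "--gradient_checkpointing",
    "--fp16",
    "--overwrite_output_dir",
    "--do_train",
    "--do_eval",
    "--predict_with_generate",
    "--use_auth_token",
    "--push_to_hub",
]
subprocess.run(cmd, check=True)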
wandb/run-20240327_141033-golaq7b9/files/wandb-summary.json ADDED
@@ -0,0 +1 @@
+ {"train/loss": 0.1066, "train/grad_norm": 1.6706323623657227, "train/learning_rate": 8.78888888888889e-05, "train/epoch": 4.73, "train/global_step": 1050, "_timestamp": 1711548173.35093, "_runtime": 3140.1371948719025, "_step": 42, "eval/loss": 0.3065292239189148, "eval/wer": 0.3243838368229931, "eval/runtime": 1302.6648, "eval/samples_per_second": 2.397, "eval/steps_per_second": 0.075}
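
In the summary above, eval/wer is the word error rate on the Common Voice Hindi test split, so 0.3244 means roughly 32.4% of reference words were substituted, inserted, or deleted. A minimal sketch of how such a figure is computed, assuming the evaluate library and using hypothetical example strings:

import evaluate

# Hypothetical prediction/reference pair; the real run scores the model's
# transcriptions against the "sentence" column of the test split.
predictions = ["यह एक परीक्षण है"]
references = ["यह एक परीक्षा है"]

wer = evaluate.load("wer").compute(predictions=predictions, references=references)
print(wer)  # 0.25 here: one substituted word out of four reference words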
wandb/run-20240327_141033-golaq7b9/logs/debug-internal.log ADDED
The diff for this file is too large to render. See raw diff
 
wandb/run-20240327_141033-golaq7b9/logs/debug.log ADDED
@@ -0,0 +1,28 @@
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_setup.py:_flush():76] Current SDK version is 0.16.2
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_setup.py:_flush():76] Configure stats pid to 1482814
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_setup.py:_flush():76] Loading settings from /home/sanchit/.config/wandb/settings
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_setup.py:_flush():76] Loading settings from /home/sanchit/distil-large-v3-hi-ft-frozen-encoder/wandb/settings
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program_relpath': 'run_speech_recognition_seq2seq.py', 'program_abspath': '/home/sanchit/distil-large-v3-hi-ft-frozen-encoder/run_speech_recognition_seq2seq.py', 'program': 'run_speech_recognition_seq2seq.py'}
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_init.py:_log_setup():526] Logging user logs to /home/sanchit/distil-large-v3-hi-ft-frozen-encoder/wandb/run-20240327_141033-golaq7b9/logs/debug.log
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_init.py:_log_setup():527] Logging internal logs to /home/sanchit/distil-large-v3-hi-ft-frozen-encoder/wandb/run-20240327_141033-golaq7b9/logs/debug-internal.log
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_init.py:init():566] calling init triggers
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_init.py:init():573] wandb.init called with sweep_config: {}
+ config: {}
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_init.py:init():616] starting backend
+ 2024-03-27 14:10:33,211 INFO MainThread:1482814 [wandb_init.py:init():620] setting up manager
+ 2024-03-27 14:10:33,212 INFO MainThread:1482814 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
+ 2024-03-27 14:10:33,213 INFO MainThread:1482814 [wandb_init.py:init():628] backend started and connected
+ 2024-03-27 14:10:33,217 INFO MainThread:1482814 [wandb_init.py:init():720] updated telemetry
+ 2024-03-27 14:10:33,272 INFO MainThread:1482814 [wandb_init.py:init():753] communicating run to backend with 90.0 second timeout
+ 2024-03-27 14:10:33,578 INFO MainThread:1482814 [wandb_run.py:_on_init():2254] communicating current version
+ 2024-03-27 14:10:33,602 INFO MainThread:1482814 [wandb_run.py:_on_init():2263] got version response upgrade_message: "wandb version 0.16.5 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
+
+ 2024-03-27 14:10:33,602 INFO MainThread:1482814 [wandb_init.py:init():804] starting run threads in backend
+ 2024-03-27 14:10:34,006 INFO MainThread:1482814 [wandb_run.py:_console_start():2233] atexit reg
+ 2024-03-27 14:10:34,006 INFO MainThread:1482814 [wandb_run.py:_redirect():2088] redirect: wrap_raw
+ 2024-03-27 14:10:34,006 INFO MainThread:1482814 [wandb_run.py:_redirect():2153] Wrapping output streams.
+ 2024-03-27 14:10:34,007 INFO MainThread:1482814 [wandb_run.py:_redirect():2178] Redirects installed.
+ 2024-03-27 14:10:34,007 INFO MainThread:1482814 [wandb_init.py:init():847] run started, returning control to user process
+ 2024-03-27 14:10:34,009 INFO MainThread:1482814 [wandb_run.py:_config_callback():1342] config_cb None None {'vocab_size': 51866, 'num_mel_bins': 128, 'd_model': 1280, 'encoder_layers': 32, 'encoder_attention_heads': 20, 'decoder_layers': 2, 'decoder_attention_heads': 20, 'decoder_ffn_dim': 5120, 'encoder_ffn_dim': 5120, 'dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.0, 'activation_function': 'gelu', 'init_std': 0.02, 'encoder_layerdrop': 0.0, 'decoder_layerdrop': 0.0, 'use_cache': True, 'num_hidden_layers': 32, 'scale_embedding': False, 'max_source_positions': 1500, 'max_target_positions': 448, 'classifier_proj_size': 256, 'use_weighted_layer_sum': False, 'apply_spec_augment': False, 'mask_time_prob': 0.05, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.0, 'mask_feature_length': 10, 'mask_feature_min_masks': 0, 'median_filter_width': 7, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float16', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': True, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 448, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': [220, 50257], 'architectures': ['WhisperForConditionalGeneration'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 50257, 'pad_token_id': 50256, 'eos_token_id': 50257, 'sep_token_id': None, 'decoder_start_token_id': 50258, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'distil-whisper/distil-large-v3', 'transformers_version': '4.40.0.dev0', 'model_type': 'whisper', 'forced_decoder_ids': None, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': True, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 32, 'per_device_eval_batch_size': 32, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 0.0001, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 3.0, 'max_steps': 5000, 'lr_scheduler_type': 'linear', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.0, 'warmup_steps': 500, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': './runs/Mar27_14-10-22_hf-dgx-01', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 25, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 1000, 'save_total_limit': None, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': True, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': 1000, 'dataloader_num_workers': 4, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': './', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'adamw_torch', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'input_length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': False, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None, 'sortish_sampler': False, 'predict_with_generate': True, 'generation_max_length': 225, 'generation_num_beams': None, 'generation_config': None}
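
The config_cb entry above records the model architecture (32 encoder layers, 2 decoder layers) and that this run trained with the encoder frozen. A minimal sketch of that setup with transformers, assuming only the public checkpoint named in _name_or_path:

from transformers import WhisperForConditionalGeneration

# Load the base checkpoint recorded in the config above.
model = WhisperForConditionalGeneration.from_pretrained("distil-whisper/distil-large-v3")

# Mirror the --freeze_encoder flag: the 32-layer encoder receives no
# gradient updates, so only the 2 decoder layers (and tied output
# embeddings) remain trainable.
model.freeze_encoder()

trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)
print(f"trainable parameters: {trainable:,}")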
wandb/run-20240327_141033-golaq7b9/run-golaq7b9.wandb ADDED
Binary file (871 kB). View file