jvh committed on
Commit
ee0de09
Parent: f615e0f

Upload 4 files

Files changed (4)
  1. config.json +50 -0
  2. generation_config.json +249 -0
  3. model.safetensors +3 -0
  4. quanto_qmap.json +166 -0
config.json ADDED
@@ -0,0 +1,50 @@
+ {
+   "_name_or_path": "ylacombe/whisper-large-v3-turbo",
+   "activation_dropout": 0.0,
+   "activation_function": "gelu",
+   "apply_spec_augment": false,
+   "architectures": [
+     "WhisperForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "begin_suppress_tokens": [
+     220,
+     50256
+   ],
+   "bos_token_id": 50257,
+   "classifier_proj_size": 256,
+   "d_model": 1280,
+   "decoder_attention_heads": 20,
+   "decoder_ffn_dim": 5120,
+   "decoder_layerdrop": 0.0,
+   "decoder_layers": 4,
+   "decoder_start_token_id": 50258,
+   "dropout": 0.0,
+   "encoder_attention_heads": 20,
+   "encoder_ffn_dim": 5120,
+   "encoder_layerdrop": 0.0,
+   "encoder_layers": 32,
+   "eos_token_id": 50257,
+   "init_std": 0.02,
+   "is_encoder_decoder": false,
+   "mask_feature_length": 10,
+   "mask_feature_min_masks": 0,
+   "mask_feature_prob": 0.0,
+   "mask_time_length": 10,
+   "mask_time_min_masks": 2,
+   "mask_time_prob": 0.05,
+   "max_source_positions": 1500,
+   "max_target_positions": 448,
+   "median_filter_width": 7,
+   "model_type": "whisper",
+   "num_hidden_layers": 32,
+   "num_mel_bins": 128,
+   "pad_token_id": 50257,
+   "scale_embedding": false,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.36.2",
+   "use_cache": true,
+   "use_weighted_layer_sum": false,
+   "vocab_size": 51866
+ }
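
Note: this config describes a decoder-only Whisper variant (WhisperForCausalLM, is_encoder_decoder false, 4 decoder layers at d_model 1280), the shape typically used as a draft/assistant decoder. A minimal sketch of inspecting it locally with transformers (assumes config.json sits in the working directory):

    # Minimal sketch: load the uploaded config and check the decoder shape.
    from transformers import WhisperConfig

    config = WhisperConfig.from_json_file("config.json")
    print(config.architectures)       # ['WhisperForCausalLM']
    print(config.decoder_layers)      # 4 decoder layers, d_model 1280
    print(config.is_encoder_decoder)  # False: decoder-only causal LM
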
generation_config.json ADDED
@@ -0,0 +1,249 @@
+ {
+   "alignment_heads": [
+     [
+       2,
+       4
+     ],
+     [
+       2,
+       11
+     ],
+     [
+       3,
+       3
+     ],
+     [
+       3,
+       6
+     ],
+     [
+       3,
+       11
+     ],
+     [
+       3,
+       14
+     ]
+   ],
+   "begin_suppress_tokens": [
+     220,
+     50257
+   ],
+   "bos_token_id": 50257,
+   "decoder_start_token_id": 50258,
+   "eos_token_id": 50257,
+   "forced_decoder_ids": [
+     [
+       1,
+       null
+     ],
+     [
+       2,
+       50360
+     ]
+   ],
+   "is_multilingual": true,
+   "lang_to_id": {
+     "<|af|>": 50327,
+     "<|am|>": 50334,
+     "<|ar|>": 50272,
+     "<|as|>": 50350,
+     "<|az|>": 50304,
+     "<|ba|>": 50355,
+     "<|be|>": 50330,
+     "<|bg|>": 50292,
+     "<|bn|>": 50302,
+     "<|bo|>": 50347,
+     "<|br|>": 50309,
+     "<|bs|>": 50315,
+     "<|ca|>": 50270,
+     "<|cs|>": 50283,
+     "<|cy|>": 50297,
+     "<|da|>": 50285,
+     "<|de|>": 50261,
+     "<|el|>": 50281,
+     "<|en|>": 50259,
+     "<|es|>": 50262,
+     "<|et|>": 50307,
+     "<|eu|>": 50310,
+     "<|fa|>": 50300,
+     "<|fi|>": 50277,
+     "<|fo|>": 50338,
+     "<|fr|>": 50265,
+     "<|gl|>": 50319,
+     "<|gu|>": 50333,
+     "<|haw|>": 50352,
+     "<|ha|>": 50354,
+     "<|he|>": 50279,
+     "<|hi|>": 50276,
+     "<|hr|>": 50291,
+     "<|ht|>": 50339,
+     "<|hu|>": 50286,
+     "<|hy|>": 50312,
+     "<|id|>": 50275,
+     "<|is|>": 50311,
+     "<|it|>": 50274,
+     "<|ja|>": 50266,
+     "<|jw|>": 50356,
+     "<|ka|>": 50329,
+     "<|kk|>": 50316,
+     "<|km|>": 50323,
+     "<|kn|>": 50306,
+     "<|ko|>": 50264,
+     "<|la|>": 50294,
+     "<|lb|>": 50345,
+     "<|ln|>": 50353,
+     "<|lo|>": 50336,
+     "<|lt|>": 50293,
+     "<|lv|>": 50301,
+     "<|mg|>": 50349,
+     "<|mi|>": 50295,
+     "<|mk|>": 50308,
+     "<|ml|>": 50296,
+     "<|mn|>": 50314,
+     "<|mr|>": 50320,
+     "<|ms|>": 50282,
+     "<|mt|>": 50343,
+     "<|my|>": 50346,
+     "<|ne|>": 50313,
+     "<|nl|>": 50271,
+     "<|nn|>": 50342,
+     "<|no|>": 50288,
+     "<|oc|>": 50328,
+     "<|pa|>": 50321,
+     "<|pl|>": 50269,
+     "<|ps|>": 50340,
+     "<|pt|>": 50267,
+     "<|ro|>": 50284,
+     "<|ru|>": 50263,
+     "<|sa|>": 50344,
+     "<|sd|>": 50332,
+     "<|si|>": 50322,
+     "<|sk|>": 50298,
+     "<|sl|>": 50305,
+     "<|sn|>": 50324,
+     "<|so|>": 50326,
+     "<|sq|>": 50317,
+     "<|sr|>": 50303,
+     "<|su|>": 50357,
+     "<|sv|>": 50273,
+     "<|sw|>": 50318,
+     "<|ta|>": 50287,
+     "<|te|>": 50299,
+     "<|tg|>": 50331,
+     "<|th|>": 50289,
+     "<|tk|>": 50341,
+     "<|tl|>": 50348,
+     "<|tr|>": 50268,
+     "<|tt|>": 50351,
+     "<|uk|>": 50280,
+     "<|ur|>": 50290,
+     "<|uz|>": 50337,
+     "<|vi|>": 50278,
+     "<|yi|>": 50335,
+     "<|yo|>": 50325,
+     "<|yue|>": 50358,
+     "<|zh|>": 50260
+   },
+   "max_initial_timestamp_index": 50,
+   "max_length": 448,
+   "no_timestamps_token_id": 50364,
+   "pad_token_id": 50257,
+   "prev_sot_token_id": 50362,
+   "return_timestamps": false,
+   "suppress_tokens": [
+     1,
+     2,
+     7,
+     8,
+     9,
+     10,
+     14,
+     25,
+     26,
+     27,
+     28,
+     29,
+     31,
+     58,
+     59,
+     60,
+     61,
+     62,
+     63,
+     90,
+     91,
+     92,
+     93,
+     359,
+     503,
+     522,
+     542,
+     873,
+     893,
+     902,
+     918,
+     922,
+     931,
+     1350,
+     1853,
+     1982,
+     2460,
+     2627,
+     3246,
+     3253,
+     3268,
+     3536,
+     3846,
+     3961,
+     4183,
+     4667,
+     6585,
+     6647,
+     7273,
+     9061,
+     9383,
+     10428,
+     10929,
+     11938,
+     12033,
+     12331,
+     12562,
+     13793,
+     14157,
+     14635,
+     15265,
+     15618,
+     16553,
+     16604,
+     18362,
+     18956,
+     20075,
+     21675,
+     22520,
+     26130,
+     26161,
+     26435,
+     28279,
+     29464,
+     31650,
+     32302,
+     32470,
+     36865,
+     42863,
+     47425,
+     49870,
+     50254,
+     50258,
+     50359,
+     50360,
+     50361,
+     50362,
+     50363
+   ],
+   "task_to_id": {
+     "transcribe": 50360,
+     "translate": 50359
+   },
+   "transformers_version": "4.36.2"
+ }
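
Note: generation_config.json carries Whisper's decoding constraints: forced_decoder_ids pins decoder position 1 to the language token (null = auto-detect) and position 2 to the task token, while lang_to_id and task_to_id map the special tokens to ids. A minimal stdlib sketch of reading these mappings (assumes the file is local):

    # Minimal sketch: resolve the language/task tokens the decoder starts with.
    import json

    with open("generation_config.json") as f:
        gen = json.load(f)

    print(gen["forced_decoder_ids"])        # [[1, None], [2, 50360]]
    print(gen["lang_to_id"]["<|en|>"])      # 50259
    print(gen["task_to_id"]["transcribe"])  # 50360
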
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bd08293e36e783adcabed783a67e781414b74821384bc7ea96a376f454888f85
+ size 439952952
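
Note: this file is a Git LFS pointer, not the weights themselves; the actual 439,952,952-byte safetensors blob is fetched on `git lfs pull`. A minimal sketch verifying a downloaded copy against the pointer's oid and size:

    # Minimal sketch: check the downloaded blob against the LFS pointer above.
    import hashlib, os

    digest = hashlib.sha256()
    with open("model.safetensors", "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)

    assert os.path.getsize("model.safetensors") == 439952952
    assert digest.hexdigest() == "bd08293e36e783adcabed783a67e781414b74821384bc7ea96a376f454888f85"
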
quanto_qmap.json ADDED
@@ -0,0 +1,166 @@
+ {
+   "model.decoder.layers.0.self_attn.k_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.0.self_attn.v_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.0.self_attn.q_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.0.self_attn.out_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.0.encoder_attn.k_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.0.encoder_attn.v_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.0.encoder_attn.q_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.0.encoder_attn.out_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.0.fc1": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.0.fc2": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.1.self_attn.k_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.1.self_attn.v_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.1.self_attn.q_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.1.self_attn.out_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.1.encoder_attn.k_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.1.encoder_attn.v_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.1.encoder_attn.q_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.1.encoder_attn.out_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.1.fc1": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.1.fc2": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.2.self_attn.k_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.2.self_attn.v_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.2.self_attn.q_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.2.self_attn.out_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.2.encoder_attn.k_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.2.encoder_attn.v_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.2.encoder_attn.q_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.2.encoder_attn.out_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.2.fc1": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.2.fc2": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.3.self_attn.k_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.3.self_attn.v_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.3.self_attn.q_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.3.self_attn.out_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.3.encoder_attn.k_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.3.encoder_attn.v_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.3.encoder_attn.q_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.3.encoder_attn.out_proj": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.3.fc1": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "model.decoder.layers.3.fc2": {
+     "weights": "qint8",
+     "activations": "none"
+   },
+   "proj_out": {
+     "weights": "qint8",
+     "activations": "none"
+   }
+ }
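
Note: quanto_qmap.json is the quantization map emitted by optimum-quanto; every decoder attention/MLP projection plus proj_out carries qint8 weights with unquantized activations. A hedged sketch of reloading such a checkpoint via optimum-quanto's requantize workflow (API details may differ across versions; paths assume the three files above sit in the working directory):

    # Hedged sketch: rebuild the quantized model from the serialized state
    # dict and the quantization map. Assumes optimum-quanto's requantize()
    # workflow; the exact signature may vary by version.
    import json
    import torch
    from safetensors.torch import load_file
    from transformers import WhisperConfig, WhisperForCausalLM
    from optimum.quanto import requantize

    config = WhisperConfig.from_json_file("config.json")
    with open("quanto_qmap.json") as f:
        qmap = json.load(f)

    with torch.device("meta"):
        model = WhisperForCausalLM(config)  # skeleton, no allocated weights

    state_dict = load_file("model.safetensors")
    requantize(model, state_dict, qmap, device=torch.device("cpu"))
    model.eval()
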