jinggu committed
Commit b0fb749 · verified · 1 Parent(s): 9276176

Delete great-13b

great-13b/config.json DELETED
@@ -1,254 +0,0 @@
- {
-   "_name_or_path": "/home/jasonlu/checkpoints/vila-13b-stage2_5_sft_r2_16_more_no_left_padding_r6",
-   "architectures": [
-     "LlavaLlamaModel"
-   ],
-   "drop_path_rate": 0.0,
-   "fps": 0.0,
-   "hidden_size": 5120,
-   "image_aspect_ratio": "resize",
-   "interpolate_mode": "linear",
-   "llm_cfg": {
-     "_name_or_path": "/home/jasonlu/checkpoints/vila-13b-stage2_5_sft_r2_16_more_no_left_padding_r6/llm",
-     "add_cross_attention": false,
-     "architectures": [
-       "LlamaForCausalLM"
-     ],
-     "attention_bias": false,
-     "attention_dropout": 0.0,
-     "bad_words_ids": null,
-     "begin_suppress_tokens": null,
-     "bos_token_id": 1,
-     "chunk_size_feed_forward": 0,
-     "cross_attention_hidden_size": null,
-     "decoder_start_token_id": null,
-     "diversity_penalty": 0.0,
-     "do_sample": false,
-     "early_stopping": false,
-     "encoder_no_repeat_ngram_size": 0,
-     "eos_token_id": 2,
-     "exponential_decay_length_penalty": null,
-     "finetuning_task": null,
-     "forced_bos_token_id": null,
-     "forced_eos_token_id": null,
-     "hidden_act": "silu",
-     "hidden_size": 5120,
-     "id2label": {
-       "0": "LABEL_0",
-       "1": "LABEL_1"
-     },
-     "initializer_range": 0.02,
-     "intermediate_size": 13824,
-     "is_decoder": false,
-     "is_encoder_decoder": false,
-     "label2id": {
-       "LABEL_0": 0,
-       "LABEL_1": 1
-     },
-     "length_penalty": 1.0,
-     "max_length": 4096,
-     "max_position_embeddings": 4096,
-     "min_length": 0,
-     "model_max_length": 4096,
-     "model_type": "llama",
-     "no_repeat_ngram_size": 0,
-     "num_attention_heads": 40,
-     "num_beam_groups": 1,
-     "num_beams": 1,
-     "num_hidden_layers": 40,
-     "num_key_value_heads": 40,
-     "num_return_sequences": 1,
-     "output_attentions": false,
-     "output_hidden_states": false,
-     "output_scores": false,
-     "pad_token_id": 0,
-     "prefix": null,
-     "pretraining_tp": 1,
-     "problem_type": null,
-     "pruned_heads": {},
-     "remove_invalid_values": false,
-     "repetition_penalty": 1.0,
-     "return_dict": true,
-     "return_dict_in_generate": false,
-     "rms_norm_eps": 1e-05,
-     "rope_scaling": null,
-     "rope_theta": 10000.0,
-     "sep_token_id": null,
-     "suppress_tokens": null,
-     "task_specific_params": null,
-     "temperature": 1.0,
-     "tf_legacy_loss": false,
-     "tie_encoder_decoder": false,
-     "tie_word_embeddings": false,
-     "tokenizer_class": null,
-     "tokenizer_model_max_length": 4096,
-     "tokenizer_padding_side": "right",
-     "top_k": 50,
-     "top_p": 1.0,
-     "torch_dtype": "bfloat16",
-     "torchscript": false,
-     "typical_p": 1.0,
-     "use_bfloat16": false,
-     "use_cache": true,
-     "vocab_size": 32000
-   },
-   "mm_hidden_size": 1152,
-   "mm_projector_cfg": {
-     "_name_or_path": "/home/jasonlu/checkpoints/vila-13b-stage2_5_sft_r2_16_more_no_left_padding_r6/mm_projector",
-     "add_cross_attention": false,
-     "architectures": [
-       "MultimodalProjector"
-     ],
-     "bad_words_ids": null,
-     "begin_suppress_tokens": null,
-     "bos_token_id": null,
-     "chunk_size_feed_forward": 0,
-     "cross_attention_hidden_size": null,
-     "decoder_start_token_id": null,
-     "diversity_penalty": 0.0,
-     "do_sample": false,
-     "early_stopping": false,
-     "encoder_no_repeat_ngram_size": 0,
-     "eos_token_id": null,
-     "exponential_decay_length_penalty": null,
-     "finetuning_task": null,
-     "forced_bos_token_id": null,
-     "forced_eos_token_id": null,
-     "id2label": {
-       "0": "LABEL_0",
-       "1": "LABEL_1"
-     },
-     "is_decoder": false,
-     "is_encoder_decoder": false,
-     "label2id": {
-       "LABEL_0": 0,
-       "LABEL_1": 1
-     },
-     "length_penalty": 1.0,
-     "max_length": 20,
-     "min_length": 0,
-     "mm_projector_type": "mlp_downsample",
-     "model_type": "v2l_projector",
-     "no_repeat_ngram_size": 0,
-     "num_beam_groups": 1,
-     "num_beams": 1,
-     "num_return_sequences": 1,
-     "output_attentions": false,
-     "output_hidden_states": false,
-     "output_scores": false,
-     "pad_token_id": null,
-     "prefix": null,
-     "problem_type": null,
-     "pruned_heads": {},
-     "remove_invalid_values": false,
-     "repetition_penalty": 1.0,
-     "return_dict": true,
-     "return_dict_in_generate": false,
-     "sep_token_id": null,
-     "suppress_tokens": null,
-     "task_specific_params": null,
-     "temperature": 1.0,
-     "tf_legacy_loss": false,
-     "tie_encoder_decoder": false,
-     "tie_word_embeddings": true,
-     "tokenizer_class": null,
-     "top_k": 50,
-     "top_p": 1.0,
-     "torch_dtype": "bfloat16",
-     "torchscript": false,
-     "typical_p": 1.0,
-     "use_bfloat16": false
-   },
-   "mm_projector_lr": null,
-   "mm_use_im_patch_token": false,
-   "mm_use_im_start_end": false,
-   "mm_vision_select_feature": "cls_patch",
-   "mm_vision_select_layer": -1,
-   "model_dtype": "torch.bfloat16",
-   "model_type": "llava_llama",
-   "num_video_frames": 16,
-   "resume_path": "/home/jasonlu/checkpoints/vila-13b-stage2_5_sft_r2_16_more_no_left_padding_r6",
-   "s2": false,
-   "s2_max_split_size": 336,
-   "s2_scales": "336,672,1008",
-   "transformers_version": "4.36.2",
-   "tune_language_model": true,
-   "tune_mm_projector": true,
-   "tune_vision_tower": true,
-   "vision_resolution": -1,
-   "vision_tower_cfg": {
-     "_name_or_path": "/home/jasonlu/checkpoints/vila-13b-stage2_5_sft_r2_16_more_no_left_padding_r6/vision_tower",
-     "add_cross_attention": false,
-     "architectures": [
-       "SiglipVisionModel"
-     ],
-     "attention_dropout": 0.0,
-     "bad_words_ids": null,
-     "begin_suppress_tokens": null,
-     "bos_token_id": null,
-     "chunk_size_feed_forward": 0,
-     "cross_attention_hidden_size": null,
-     "decoder_start_token_id": null,
-     "diversity_penalty": 0.0,
-     "do_sample": false,
-     "early_stopping": false,
-     "encoder_no_repeat_ngram_size": 0,
-     "eos_token_id": null,
-     "exponential_decay_length_penalty": null,
-     "finetuning_task": null,
-     "forced_bos_token_id": null,
-     "forced_eos_token_id": null,
-     "hidden_act": "gelu_pytorch_tanh",
-     "hidden_size": 1152,
-     "id2label": {
-       "0": "LABEL_0",
-       "1": "LABEL_1"
-     },
-     "image_size": 384,
-     "intermediate_size": 4304,
-     "is_decoder": false,
-     "is_encoder_decoder": false,
-     "label2id": {
-       "LABEL_0": 0,
-       "LABEL_1": 1
-     },
-     "layer_norm_eps": 1e-06,
-     "length_penalty": 1.0,
-     "max_length": 20,
-     "min_length": 0,
-     "model_type": "siglip_vision_model",
-     "no_repeat_ngram_size": 0,
-     "num_attention_heads": 16,
-     "num_beam_groups": 1,
-     "num_beams": 1,
-     "num_channels": 3,
-     "num_hidden_layers": 27,
-     "num_return_sequences": 1,
-     "output_attentions": false,
-     "output_hidden_states": false,
-     "output_scores": false,
-     "pad_token_id": null,
-     "patch_size": 14,
-     "prefix": null,
-     "problem_type": null,
-     "pruned_heads": {},
-     "remove_invalid_values": false,
-     "repetition_penalty": 1.0,
-     "return_dict": true,
-     "return_dict_in_generate": false,
-     "sep_token_id": null,
-     "suppress_tokens": null,
-     "task_specific_params": null,
-     "temperature": 1.0,
-     "tf_legacy_loss": false,
-     "tie_encoder_decoder": false,
-     "tie_word_embeddings": true,
-     "tokenizer_class": null,
-     "top_k": 50,
-     "top_p": 1.0,
-     "torch_dtype": "bfloat16",
-     "torchscript": false,
-     "typical_p": 1.0,
-     "use_bfloat16": false
-   }
- }
 
great-13b/llm/config.json DELETED
@@ -1,33 +0,0 @@
- {
-   "_name_or_path": "/home/jasonlu/checkpoints/vila-13b-stage2_5_sft_r2_16_more_no_left_padding_r6/llm",
-   "architectures": [
-     "LlamaForCausalLM"
-   ],
-   "attention_bias": false,
-   "attention_dropout": 0.0,
-   "bos_token_id": 1,
-   "eos_token_id": 2,
-   "hidden_act": "silu",
-   "hidden_size": 5120,
-   "initializer_range": 0.02,
-   "intermediate_size": 13824,
-   "max_length": 4096,
-   "max_position_embeddings": 4096,
-   "model_max_length": 4096,
-   "model_type": "llama",
-   "num_attention_heads": 40,
-   "num_hidden_layers": 40,
-   "num_key_value_heads": 40,
-   "pad_token_id": 0,
-   "pretraining_tp": 1,
-   "rms_norm_eps": 1e-05,
-   "rope_scaling": null,
-   "rope_theta": 10000.0,
-   "tie_word_embeddings": false,
-   "tokenizer_model_max_length": 4096,
-   "tokenizer_padding_side": "right",
-   "torch_dtype": "bfloat16",
-   "transformers_version": "4.36.2",
-   "use_cache": true,
-   "vocab_size": 32000
- }
 
great-13b/llm/generation_config.json DELETED
@@ -1,11 +0,0 @@
- {
-   "_from_model_config": true,
-   "bos_token_id": 1,
-   "do_sample": true,
-   "eos_token_id": 2,
-   "max_length": 4096,
-   "pad_token_id": 0,
-   "temperature": 0.9,
-   "top_p": 0.6,
-   "transformers_version": "4.36.2"
- }
 
great-13b/llm/model-00001-of-00006.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:1a0d76e14d5a4dd872c7951a50d87bc4953e99ea9cf1904b957ca63da7b1bd81
- size 4978265800
 
great-13b/llm/model-00002-of-00006.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:d94f066a80ba2471e6ad76ba5171e5f6742c8576b30e36532df3ef12eade2a55
- size 4970422232
 
great-13b/llm/model-00003-of-00006.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:b4a33dadf153e1d2cd201bacbf91278e942e8db668385222e22509a983b8c9bc
- size 4970422256
 
great-13b/llm/model-00004-of-00006.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:e1f9f5d7ce5ef6bc4f3e18e8fa4450065acf2c4d51286e319866cafad1f335b7
- size 4933701504
 
great-13b/llm/model-00005-of-00006.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:7d1e36906518dce54a7dfbe7477c2911abfd5e93409db62a918a9c1378fff8d5
- size 4933722216
 
great-13b/llm/model-00006-of-00006.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:a7a612e9807399989d941df39ddbf9513236143ad4f13906ea04f555740b80d6
- size 1245236920
 
great-13b/llm/model.safetensors.index.json DELETED
@@ -1,370 +0,0 @@
- {
-   "metadata": {
-     "total_size": 26031728640
-   },
-   "weight_map": {
-     "lm_head.weight": "model-00006-of-00006.safetensors",
-     "model.embed_tokens.weight": "model-00001-of-00006.safetensors",
-     "model.layers.0.input_layernorm.weight": "model-00001-of-00006.safetensors",
-     "model.layers.0.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.0.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
-     "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.1.input_layernorm.weight": "model-00001-of-00006.safetensors",
-     "model.layers.1.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.1.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
-     "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.10.input_layernorm.weight": "model-00002-of-00006.safetensors",
-     "model.layers.10.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.10.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
-     "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.11.input_layernorm.weight": "model-00002-of-00006.safetensors",
-     "model.layers.11.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.11.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
-     "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.12.input_layernorm.weight": "model-00002-of-00006.safetensors",
-     "model.layers.12.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.12.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
-     "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.13.input_layernorm.weight": "model-00002-of-00006.safetensors",
-     "model.layers.13.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.13.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
-     "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.14.input_layernorm.weight": "model-00002-of-00006.safetensors",
-     "model.layers.14.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.14.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
-     "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.15.input_layernorm.weight": "model-00003-of-00006.safetensors",
-     "model.layers.15.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.15.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.15.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
-     "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.15.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.15.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.16.input_layernorm.weight": "model-00003-of-00006.safetensors",
-     "model.layers.16.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.16.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
-     "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.17.input_layernorm.weight": "model-00003-of-00006.safetensors",
-     "model.layers.17.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.17.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
-     "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.18.input_layernorm.weight": "model-00003-of-00006.safetensors",
-     "model.layers.18.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.18.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
-     "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.19.input_layernorm.weight": "model-00003-of-00006.safetensors",
-     "model.layers.19.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.19.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
-     "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.2.input_layernorm.weight": "model-00001-of-00006.safetensors",
-     "model.layers.2.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.2.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
-     "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.20.input_layernorm.weight": "model-00003-of-00006.safetensors",
-     "model.layers.20.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.20.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
-     "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.21.input_layernorm.weight": "model-00003-of-00006.safetensors",
-     "model.layers.21.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.21.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
-     "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.22.input_layernorm.weight": "model-00003-of-00006.safetensors",
-     "model.layers.22.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.22.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
-     "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
-     "model.layers.23.input_layernorm.weight": "model-00004-of-00006.safetensors",
-     "model.layers.23.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.23.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.23.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.23.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
-     "model.layers.23.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.23.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.23.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.23.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.24.input_layernorm.weight": "model-00004-of-00006.safetensors",
-     "model.layers.24.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.24.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.24.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
-     "model.layers.24.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.24.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.24.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.24.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.25.input_layernorm.weight": "model-00004-of-00006.safetensors",
-     "model.layers.25.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.25.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.25.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
-     "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.25.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.26.input_layernorm.weight": "model-00004-of-00006.safetensors",
-     "model.layers.26.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.26.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
-     "model.layers.26.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.26.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.26.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.26.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.27.input_layernorm.weight": "model-00004-of-00006.safetensors",
-     "model.layers.27.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.27.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
-     "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.28.input_layernorm.weight": "model-00004-of-00006.safetensors",
-     "model.layers.28.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.28.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.28.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.28.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
-     "model.layers.28.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.28.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.28.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.28.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.29.input_layernorm.weight": "model-00004-of-00006.safetensors",
-     "model.layers.29.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.29.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.29.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.29.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
-     "model.layers.29.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.29.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.29.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.29.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.3.input_layernorm.weight": "model-00001-of-00006.safetensors",
-     "model.layers.3.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.3.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
-     "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.30.input_layernorm.weight": "model-00005-of-00006.safetensors",
-     "model.layers.30.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.30.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.30.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.30.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
-     "model.layers.30.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.30.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.30.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.30.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
-     "model.layers.31.input_layernorm.weight": "model-00005-of-00006.safetensors",
-     "model.layers.31.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.31.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.31.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.31.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
-     "model.layers.31.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.31.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.31.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.31.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.32.input_layernorm.weight": "model-00005-of-00006.safetensors",
-     "model.layers.32.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.32.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.32.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.32.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
-     "model.layers.32.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.32.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.32.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.32.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.33.input_layernorm.weight": "model-00005-of-00006.safetensors",
-     "model.layers.33.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.33.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.33.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
-     "model.layers.33.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.33.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.33.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.33.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.34.input_layernorm.weight": "model-00005-of-00006.safetensors",
-     "model.layers.34.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.34.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.34.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.34.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
-     "model.layers.34.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.34.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.34.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.35.input_layernorm.weight": "model-00005-of-00006.safetensors",
-     "model.layers.35.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.35.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.35.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.35.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
-     "model.layers.35.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.35.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.35.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.35.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.36.input_layernorm.weight": "model-00005-of-00006.safetensors",
-     "model.layers.36.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.36.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.36.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.36.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
-     "model.layers.36.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.36.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.36.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.36.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.37.input_layernorm.weight": "model-00005-of-00006.safetensors",
-     "model.layers.37.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.37.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.37.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.37.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
-     "model.layers.37.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.37.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.37.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.37.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.38.input_layernorm.weight": "model-00006-of-00006.safetensors",
-     "model.layers.38.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
-     "model.layers.38.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.38.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
-     "model.layers.38.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
-     "model.layers.38.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.38.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.38.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.38.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
-     "model.layers.39.input_layernorm.weight": "model-00006-of-00006.safetensors",
-     "model.layers.39.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
-     "model.layers.39.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
-     "model.layers.39.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
-     "model.layers.39.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
-     "model.layers.39.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
-     "model.layers.39.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
-     "model.layers.39.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
-     "model.layers.39.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
-     "model.layers.4.input_layernorm.weight": "model-00001-of-00006.safetensors",
-     "model.layers.4.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.4.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
-     "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.5.input_layernorm.weight": "model-00001-of-00006.safetensors",
-     "model.layers.5.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.5.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
-     "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.6.input_layernorm.weight": "model-00001-of-00006.safetensors",
-     "model.layers.6.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.6.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
-     "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.7.input_layernorm.weight": "model-00002-of-00006.safetensors",
-     "model.layers.7.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.7.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
-     "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
-     "model.layers.8.input_layernorm.weight": "model-00002-of-00006.safetensors",
-     "model.layers.8.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.8.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
-     "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.9.input_layernorm.weight": "model-00002-of-00006.safetensors",
-     "model.layers.9.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.9.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
-     "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
-     "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
-     "model.norm.weight": "model-00006-of-00006.safetensors"
-   }
- }
 
great-13b/llm/special_tokens_map.json DELETED
@@ -1,24 +0,0 @@
- {
-   "bos_token": {
-     "content": "<s>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   },
-   "eos_token": {
-     "content": "</s>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   },
-   "pad_token": "<unk>",
-   "unk_token": {
-     "content": "<unk>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   }
- }
 
great-13b/llm/tokenizer.model DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:7aedb3582ecda9fa99ee9242c17a9658f6744db083ee6ebdc8fb14857f84d220
- size 499723
 
great-13b/llm/tokenizer_config.json DELETED
@@ -1,43 +0,0 @@
- {
-   "add_bos_token": true,
-   "add_eos_token": false,
-   "add_prefix_space": true,
-   "added_tokens_decoder": {
-     "0": {
-       "content": "<unk>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "1": {
-       "content": "<s>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "2": {
-       "content": "</s>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     }
-   },
-   "bos_token": "<s>",
-   "clean_up_tokenization_spaces": false,
-   "eos_token": "</s>",
-   "legacy": false,
-   "model_max_length": 4096,
-   "pad_token": "<unk>",
-   "padding_side": "right",
-   "sp_model_kwargs": {},
-   "spaces_between_special_tokens": false,
-   "tokenizer_class": "LlamaTokenizer",
-   "unk_token": "<unk>",
-   "use_default_system_prompt": false
- }
 
great-13b/mm_projector/config.json DELETED
@@ -1,10 +0,0 @@
- {
-   "_name_or_path": "/home/jasonlu/checkpoints/vila-13b-stage2_5_sft_r2_16_more_no_left_padding_r6/mm_projector",
-   "architectures": [
-     "MultimodalProjector"
-   ],
-   "mm_projector_type": "mlp_downsample",
-   "model_type": "v2l_projector",
-   "torch_dtype": "bfloat16",
-   "transformers_version": "4.36.2"
- }
 
great-13b/mm_projector/model.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:a416ece8ddeae1f2323004203de28b1f4dd945ad7d43167a9bca7551d63cc4dd
- size 99654160
 
great-13b/trainer_state.json DELETED
The diff for this file is too large to render.
 
great-13b/vision_tower/config.json DELETED
@@ -1,19 +0,0 @@
1
- {
2
- "_name_or_path": "/home/jasonlu/checkpoints/vila-13b-stage2_5_sft_r2_16_more_no_left_padding_r6/vision_tower",
3
- "architectures": [
4
- "SiglipVisionModel"
5
- ],
6
- "attention_dropout": 0.0,
7
- "hidden_act": "gelu_pytorch_tanh",
8
- "hidden_size": 1152,
9
- "image_size": 384,
10
- "intermediate_size": 4304,
11
- "layer_norm_eps": 1e-06,
12
- "model_type": "siglip_vision_model",
13
- "num_attention_heads": 16,
14
- "num_channels": 3,
15
- "num_hidden_layers": 27,
16
- "patch_size": 14,
17
- "torch_dtype": "bfloat16",
18
- "transformers_version": "4.36.2"
19
- }
 
great-13b/vision_tower/model.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:8979fb7020152ab503d5312627ffed841a9948e16a4af93592a8466cefdb0c12
- size 856506120
 
great-13b/vision_tower/preprocessor_config.json DELETED
@@ -1,24 +0,0 @@
- {
-   "do_convert_rgb": true,
-   "do_normalize": true,
-   "do_rescale": true,
-   "do_resize": true,
-   "image_mean": [
-     0.5,
-     0.5,
-     0.5
-   ],
-   "image_processor_type": "SiglipImageProcessor",
-   "image_std": [
-     0.5,
-     0.5,
-     0.5
-   ],
-   "processor_class": "SiglipProcessor",
-   "resample": 3,
-   "rescale_factor": 0.00392156862745098,
-   "size": {
-     "height": 384,
-     "width": 384
-   }
- }