jayantdocplix committed on
Commit
63ead88
·
1 Parent(s): 8451189

Update pytorch_model.bin.index.json

Browse files
Files changed (1) hide show
  1. pytorch_model.bin.index.json +1 -95
pytorch_model.bin.index.json CHANGED
@@ -3,7 +3,6 @@
3
  "total_size": 52063508480
4
  },
5
  "weight_map": {
6
- "lm_head.weight": "pytorch_model-00006-of-00006.bin",
7
  "model.embed_tokens.weight": "pytorch_model-00001-of-00006.bin",
8
  "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00006.bin",
9
  "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00006.bin",
@@ -245,106 +244,13 @@
245
  "model.layers.3.self_attn.q_proj.weight": "pytorch_model-00001-of-00006.bin",
246
  "model.layers.3.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00006.bin",
247
  "model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00006.bin",
248
- "model.layers.30.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
249
  "model.layers.30.mlp.down_proj.weight": "pytorch_model-00004-of-00006.bin",
250
  "model.layers.30.mlp.gate_proj.weight": "pytorch_model-00004-of-00006.bin",
251
- "model.layers.30.mlp.up_proj.weight": "pytorch_model-00005-of-00006.bin",
252
- "model.layers.30.post_attention_layernorm.weight": "pytorch_model-00005-of-00006.bin",
253
  "model.layers.30.self_attn.k_proj.weight": "pytorch_model-00004-of-00006.bin",
254
  "model.layers.30.self_attn.o_proj.weight": "pytorch_model-00004-of-00006.bin",
255
  "model.layers.30.self_attn.q_proj.weight": "pytorch_model-00004-of-00006.bin",
256
  "model.layers.30.self_attn.rotary_emb.inv_freq": "pytorch_model-00004-of-00006.bin",
257
  "model.layers.30.self_attn.v_proj.weight": "pytorch_model-00004-of-00006.bin",
258
- "model.layers.31.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
259
- "model.layers.31.mlp.down_proj.weight": "pytorch_model-00005-of-00006.bin",
260
- "model.layers.31.mlp.gate_proj.weight": "pytorch_model-00005-of-00006.bin",
261
- "model.layers.31.mlp.up_proj.weight": "pytorch_model-00005-of-00006.bin",
262
- "model.layers.31.post_attention_layernorm.weight": "pytorch_model-00005-of-00006.bin",
263
- "model.layers.31.self_attn.k_proj.weight": "pytorch_model-00005-of-00006.bin",
264
- "model.layers.31.self_attn.o_proj.weight": "pytorch_model-00005-of-00006.bin",
265
- "model.layers.31.self_attn.q_proj.weight": "pytorch_model-00005-of-00006.bin",
266
- "model.layers.31.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00006.bin",
267
- "model.layers.31.self_attn.v_proj.weight": "pytorch_model-00005-of-00006.bin",
268
- "model.layers.32.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
269
- "model.layers.32.mlp.down_proj.weight": "pytorch_model-00005-of-00006.bin",
270
- "model.layers.32.mlp.gate_proj.weight": "pytorch_model-00005-of-00006.bin",
271
- "model.layers.32.mlp.up_proj.weight": "pytorch_model-00005-of-00006.bin",
272
- "model.layers.32.post_attention_layernorm.weight": "pytorch_model-00005-of-00006.bin",
273
- "model.layers.32.self_attn.k_proj.weight": "pytorch_model-00005-of-00006.bin",
274
- "model.layers.32.self_attn.o_proj.weight": "pytorch_model-00005-of-00006.bin",
275
- "model.layers.32.self_attn.q_proj.weight": "pytorch_model-00005-of-00006.bin",
276
- "model.layers.32.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00006.bin",
277
- "model.layers.32.self_attn.v_proj.weight": "pytorch_model-00005-of-00006.bin",
278
- "model.layers.33.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
279
- "model.layers.33.mlp.down_proj.weight": "pytorch_model-00005-of-00006.bin",
280
- "model.layers.33.mlp.gate_proj.weight": "pytorch_model-00005-of-00006.bin",
281
- "model.layers.33.mlp.up_proj.weight": "pytorch_model-00005-of-00006.bin",
282
- "model.layers.33.post_attention_layernorm.weight": "pytorch_model-00005-of-00006.bin",
283
- "model.layers.33.self_attn.k_proj.weight": "pytorch_model-00005-of-00006.bin",
284
- "model.layers.33.self_attn.o_proj.weight": "pytorch_model-00005-of-00006.bin",
285
- "model.layers.33.self_attn.q_proj.weight": "pytorch_model-00005-of-00006.bin",
286
- "model.layers.33.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00006.bin",
287
- "model.layers.33.self_attn.v_proj.weight": "pytorch_model-00005-of-00006.bin",
288
- "model.layers.34.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
289
- "model.layers.34.mlp.down_proj.weight": "pytorch_model-00005-of-00006.bin",
290
- "model.layers.34.mlp.gate_proj.weight": "pytorch_model-00005-of-00006.bin",
291
- "model.layers.34.mlp.up_proj.weight": "pytorch_model-00005-of-00006.bin",
292
- "model.layers.34.post_attention_layernorm.weight": "pytorch_model-00005-of-00006.bin",
293
- "model.layers.34.self_attn.k_proj.weight": "pytorch_model-00005-of-00006.bin",
294
- "model.layers.34.self_attn.o_proj.weight": "pytorch_model-00005-of-00006.bin",
295
- "model.layers.34.self_attn.q_proj.weight": "pytorch_model-00005-of-00006.bin",
296
- "model.layers.34.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00006.bin",
297
- "model.layers.34.self_attn.v_proj.weight": "pytorch_model-00005-of-00006.bin",
298
- "model.layers.35.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
299
- "model.layers.35.mlp.down_proj.weight": "pytorch_model-00005-of-00006.bin",
300
- "model.layers.35.mlp.gate_proj.weight": "pytorch_model-00005-of-00006.bin",
301
- "model.layers.35.mlp.up_proj.weight": "pytorch_model-00005-of-00006.bin",
302
- "model.layers.35.post_attention_layernorm.weight": "pytorch_model-00005-of-00006.bin",
303
- "model.layers.35.self_attn.k_proj.weight": "pytorch_model-00005-of-00006.bin",
304
- "model.layers.35.self_attn.o_proj.weight": "pytorch_model-00005-of-00006.bin",
305
- "model.layers.35.self_attn.q_proj.weight": "pytorch_model-00005-of-00006.bin",
306
- "model.layers.35.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00006.bin",
307
- "model.layers.35.self_attn.v_proj.weight": "pytorch_model-00005-of-00006.bin",
308
- "model.layers.36.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
309
- "model.layers.36.mlp.down_proj.weight": "pytorch_model-00005-of-00006.bin",
310
- "model.layers.36.mlp.gate_proj.weight": "pytorch_model-00005-of-00006.bin",
311
- "model.layers.36.mlp.up_proj.weight": "pytorch_model-00005-of-00006.bin",
312
- "model.layers.36.post_attention_layernorm.weight": "pytorch_model-00005-of-00006.bin",
313
- "model.layers.36.self_attn.k_proj.weight": "pytorch_model-00005-of-00006.bin",
314
- "model.layers.36.self_attn.o_proj.weight": "pytorch_model-00005-of-00006.bin",
315
- "model.layers.36.self_attn.q_proj.weight": "pytorch_model-00005-of-00006.bin",
316
- "model.layers.36.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00006.bin",
317
- "model.layers.36.self_attn.v_proj.weight": "pytorch_model-00005-of-00006.bin",
318
- "model.layers.37.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
319
- "model.layers.37.mlp.down_proj.weight": "pytorch_model-00005-of-00006.bin",
320
- "model.layers.37.mlp.gate_proj.weight": "pytorch_model-00005-of-00006.bin",
321
- "model.layers.37.mlp.up_proj.weight": "pytorch_model-00005-of-00006.bin",
322
- "model.layers.37.post_attention_layernorm.weight": "pytorch_model-00005-of-00006.bin",
323
- "model.layers.37.self_attn.k_proj.weight": "pytorch_model-00005-of-00006.bin",
324
- "model.layers.37.self_attn.o_proj.weight": "pytorch_model-00005-of-00006.bin",
325
- "model.layers.37.self_attn.q_proj.weight": "pytorch_model-00005-of-00006.bin",
326
- "model.layers.37.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00006.bin",
327
- "model.layers.37.self_attn.v_proj.weight": "pytorch_model-00005-of-00006.bin",
328
- "model.layers.38.input_layernorm.weight": "pytorch_model-00006-of-00006.bin",
329
- "model.layers.38.mlp.down_proj.weight": "pytorch_model-00006-of-00006.bin",
330
- "model.layers.38.mlp.gate_proj.weight": "pytorch_model-00005-of-00006.bin",
331
- "model.layers.38.mlp.up_proj.weight": "pytorch_model-00006-of-00006.bin",
332
- "model.layers.38.post_attention_layernorm.weight": "pytorch_model-00006-of-00006.bin",
333
- "model.layers.38.self_attn.k_proj.weight": "pytorch_model-00005-of-00006.bin",
334
- "model.layers.38.self_attn.o_proj.weight": "pytorch_model-00005-of-00006.bin",
335
- "model.layers.38.self_attn.q_proj.weight": "pytorch_model-00005-of-00006.bin",
336
- "model.layers.38.self_attn.rotary_emb.inv_freq": "pytorch_model-00005-of-00006.bin",
337
- "model.layers.38.self_attn.v_proj.weight": "pytorch_model-00005-of-00006.bin",
338
- "model.layers.39.input_layernorm.weight": "pytorch_model-00006-of-00006.bin",
339
- "model.layers.39.mlp.down_proj.weight": "pytorch_model-00006-of-00006.bin",
340
- "model.layers.39.mlp.gate_proj.weight": "pytorch_model-00006-of-00006.bin",
341
- "model.layers.39.mlp.up_proj.weight": "pytorch_model-00006-of-00006.bin",
342
- "model.layers.39.post_attention_layernorm.weight": "pytorch_model-00006-of-00006.bin",
343
- "model.layers.39.self_attn.k_proj.weight": "pytorch_model-00006-of-00006.bin",
344
- "model.layers.39.self_attn.o_proj.weight": "pytorch_model-00006-of-00006.bin",
345
- "model.layers.39.self_attn.q_proj.weight": "pytorch_model-00006-of-00006.bin",
346
- "model.layers.39.self_attn.rotary_emb.inv_freq": "pytorch_model-00006-of-00006.bin",
347
- "model.layers.39.self_attn.v_proj.weight": "pytorch_model-00006-of-00006.bin",
348
  "model.layers.4.input_layernorm.weight": "pytorch_model-00001-of-00006.bin",
349
  "model.layers.4.mlp.down_proj.weight": "pytorch_model-00001-of-00006.bin",
350
  "model.layers.4.mlp.gate_proj.weight": "pytorch_model-00001-of-00006.bin",
@@ -405,6 +311,6 @@
405
  "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00002-of-00006.bin",
406
  "model.layers.9.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00006.bin",
407
  "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00002-of-00006.bin",
408
- "model.norm.weight": "pytorch_model-00006-of-00006.bin"
409
  }
410
  }
 
3
  "total_size": 52063508480
4
  },
5
  "weight_map": {
 
6
  "model.embed_tokens.weight": "pytorch_model-00001-of-00006.bin",
7
  "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00006.bin",
8
  "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00006.bin",
 
244
  "model.layers.3.self_attn.q_proj.weight": "pytorch_model-00001-of-00006.bin",
245
  "model.layers.3.self_attn.rotary_emb.inv_freq": "pytorch_model-00001-of-00006.bin",
246
  "model.layers.3.self_attn.v_proj.weight": "pytorch_model-00001-of-00006.bin",
 
247
  "model.layers.30.mlp.down_proj.weight": "pytorch_model-00004-of-00006.bin",
248
  "model.layers.30.mlp.gate_proj.weight": "pytorch_model-00004-of-00006.bin",
 
 
249
  "model.layers.30.self_attn.k_proj.weight": "pytorch_model-00004-of-00006.bin",
250
  "model.layers.30.self_attn.o_proj.weight": "pytorch_model-00004-of-00006.bin",
251
  "model.layers.30.self_attn.q_proj.weight": "pytorch_model-00004-of-00006.bin",
252
  "model.layers.30.self_attn.rotary_emb.inv_freq": "pytorch_model-00004-of-00006.bin",
253
  "model.layers.30.self_attn.v_proj.weight": "pytorch_model-00004-of-00006.bin",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
254
  "model.layers.4.input_layernorm.weight": "pytorch_model-00001-of-00006.bin",
255
  "model.layers.4.mlp.down_proj.weight": "pytorch_model-00001-of-00006.bin",
256
  "model.layers.4.mlp.gate_proj.weight": "pytorch_model-00001-of-00006.bin",
 
311
  "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00002-of-00006.bin",
312
  "model.layers.9.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00006.bin",
313
  "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00002-of-00006.bin",
314
+ "model.norm.weight": "pytorch_model-00001-of-00006.bin"
315
  }
316
  }