diff --git a/added_tokens.json b/added_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..98d35d40c8d1ffb6697507b84b5e0ba8ff554347 --- /dev/null +++ b/added_tokens.json @@ -0,0 +1,5 @@ +{ + "": 103168, + "<|modelname|>": 103169, + "<|modelorg|>": 103170 +} diff --git a/config.json b/config.json new file mode 100644 index 0000000000000000000000000000000000000000..747d6c302554d50e83cc76cdd30db5ee867613fe --- /dev/null +++ b/config.json @@ -0,0 +1,26 @@ +{ + "architectures": [ + "LlamaForCausalLM" + ], + "bos_token_id": 1, + "eos_token_id": 2, + "hidden_act": "silu", + "hidden_size": 5120, + "initializer_range": 0.02, + "intermediate_size": 13824, + "max_position_embeddings": 16384, + "model_type": "llama", + "num_attention_heads": 40, + "num_hidden_layers": 60, + "num_key_value_heads": 40, + "pad_token_id": 2, + "pretraining_tp": 1, + "rms_norm_eps": 1e-06, + "rope_scaling": null, + "rope_theta": 82684.62264056221, + "tie_word_embeddings": false, + "torch_dtype": "bfloat16", + "transformers_version": "4.33.3", + "use_cache": true, + "vocab_size": 103424 +} diff --git a/pytorch_model.bin.index.json b/pytorch_model.bin.index.json new file mode 100644 index 0000000000000000000000000000000000000000..cc12d87ce21ae211519b6bb1159b09a6167f3ab0 --- /dev/null +++ b/pytorch_model.bin.index.json @@ -0,0 +1,550 @@ +{ + "metadata": { + "total_size": 40182671360 + }, + "weight_map": { + "lm_head.weight": "pytorch_model_00063-of-00063.bin", + "model.embed_tokens.weight": "pytorch_model_00001-of-00063.bin", + "model.layers.0.input_layernorm.weight": "pytorch_model_00002-of-00063.bin", + "model.layers.0.mlp.down_proj.weight": "pytorch_model_00002-of-00063.bin", + "model.layers.0.mlp.gate_proj.weight": "pytorch_model_00002-of-00063.bin", + "model.layers.0.mlp.up_proj.weight": "pytorch_model_00002-of-00063.bin", + "model.layers.0.post_attention_layernorm.weight": "pytorch_model_00002-of-00063.bin", + "model.layers.0.self_attn.k_proj.weight": 
"pytorch_model_00002-of-00063.bin", + "model.layers.0.self_attn.o_proj.weight": "pytorch_model_00002-of-00063.bin", + "model.layers.0.self_attn.q_proj.weight": "pytorch_model_00002-of-00063.bin", + "model.layers.0.self_attn.v_proj.weight": "pytorch_model_00002-of-00063.bin", + "model.layers.1.input_layernorm.weight": "pytorch_model_00003-of-00063.bin", + "model.layers.1.mlp.down_proj.weight": "pytorch_model_00003-of-00063.bin", + "model.layers.1.mlp.gate_proj.weight": "pytorch_model_00003-of-00063.bin", + "model.layers.1.mlp.up_proj.weight": "pytorch_model_00003-of-00063.bin", + "model.layers.1.post_attention_layernorm.weight": "pytorch_model_00003-of-00063.bin", + "model.layers.1.self_attn.k_proj.weight": "pytorch_model_00003-of-00063.bin", + "model.layers.1.self_attn.o_proj.weight": "pytorch_model_00003-of-00063.bin", + "model.layers.1.self_attn.q_proj.weight": "pytorch_model_00003-of-00063.bin", + "model.layers.1.self_attn.v_proj.weight": "pytorch_model_00003-of-00063.bin", + "model.layers.10.input_layernorm.weight": "pytorch_model_00012-of-00063.bin", + "model.layers.10.mlp.down_proj.weight": "pytorch_model_00012-of-00063.bin", + "model.layers.10.mlp.gate_proj.weight": "pytorch_model_00012-of-00063.bin", + "model.layers.10.mlp.up_proj.weight": "pytorch_model_00012-of-00063.bin", + "model.layers.10.post_attention_layernorm.weight": "pytorch_model_00012-of-00063.bin", + "model.layers.10.self_attn.k_proj.weight": "pytorch_model_00012-of-00063.bin", + "model.layers.10.self_attn.o_proj.weight": "pytorch_model_00012-of-00063.bin", + "model.layers.10.self_attn.q_proj.weight": "pytorch_model_00012-of-00063.bin", + "model.layers.10.self_attn.v_proj.weight": "pytorch_model_00012-of-00063.bin", + "model.layers.11.input_layernorm.weight": "pytorch_model_00013-of-00063.bin", + "model.layers.11.mlp.down_proj.weight": "pytorch_model_00013-of-00063.bin", + "model.layers.11.mlp.gate_proj.weight": "pytorch_model_00013-of-00063.bin", + "model.layers.11.mlp.up_proj.weight": 
"pytorch_model_00013-of-00063.bin", + "model.layers.11.post_attention_layernorm.weight": "pytorch_model_00013-of-00063.bin", + "model.layers.11.self_attn.k_proj.weight": "pytorch_model_00013-of-00063.bin", + "model.layers.11.self_attn.o_proj.weight": "pytorch_model_00013-of-00063.bin", + "model.layers.11.self_attn.q_proj.weight": "pytorch_model_00013-of-00063.bin", + "model.layers.11.self_attn.v_proj.weight": "pytorch_model_00013-of-00063.bin", + "model.layers.12.input_layernorm.weight": "pytorch_model_00014-of-00063.bin", + "model.layers.12.mlp.down_proj.weight": "pytorch_model_00014-of-00063.bin", + "model.layers.12.mlp.gate_proj.weight": "pytorch_model_00014-of-00063.bin", + "model.layers.12.mlp.up_proj.weight": "pytorch_model_00014-of-00063.bin", + "model.layers.12.post_attention_layernorm.weight": "pytorch_model_00014-of-00063.bin", + "model.layers.12.self_attn.k_proj.weight": "pytorch_model_00014-of-00063.bin", + "model.layers.12.self_attn.o_proj.weight": "pytorch_model_00014-of-00063.bin", + "model.layers.12.self_attn.q_proj.weight": "pytorch_model_00014-of-00063.bin", + "model.layers.12.self_attn.v_proj.weight": "pytorch_model_00014-of-00063.bin", + "model.layers.13.input_layernorm.weight": "pytorch_model_00015-of-00063.bin", + "model.layers.13.mlp.down_proj.weight": "pytorch_model_00015-of-00063.bin", + "model.layers.13.mlp.gate_proj.weight": "pytorch_model_00015-of-00063.bin", + "model.layers.13.mlp.up_proj.weight": "pytorch_model_00015-of-00063.bin", + "model.layers.13.post_attention_layernorm.weight": "pytorch_model_00015-of-00063.bin", + "model.layers.13.self_attn.k_proj.weight": "pytorch_model_00015-of-00063.bin", + "model.layers.13.self_attn.o_proj.weight": "pytorch_model_00015-of-00063.bin", + "model.layers.13.self_attn.q_proj.weight": "pytorch_model_00015-of-00063.bin", + "model.layers.13.self_attn.v_proj.weight": "pytorch_model_00015-of-00063.bin", + "model.layers.14.input_layernorm.weight": "pytorch_model_00016-of-00063.bin", + 
"model.layers.14.mlp.down_proj.weight": "pytorch_model_00016-of-00063.bin", + "model.layers.14.mlp.gate_proj.weight": "pytorch_model_00016-of-00063.bin", + "model.layers.14.mlp.up_proj.weight": "pytorch_model_00016-of-00063.bin", + "model.layers.14.post_attention_layernorm.weight": "pytorch_model_00016-of-00063.bin", + "model.layers.14.self_attn.k_proj.weight": "pytorch_model_00016-of-00063.bin", + "model.layers.14.self_attn.o_proj.weight": "pytorch_model_00016-of-00063.bin", + "model.layers.14.self_attn.q_proj.weight": "pytorch_model_00016-of-00063.bin", + "model.layers.14.self_attn.v_proj.weight": "pytorch_model_00016-of-00063.bin", + "model.layers.15.input_layernorm.weight": "pytorch_model_00017-of-00063.bin", + "model.layers.15.mlp.down_proj.weight": "pytorch_model_00017-of-00063.bin", + "model.layers.15.mlp.gate_proj.weight": "pytorch_model_00017-of-00063.bin", + "model.layers.15.mlp.up_proj.weight": "pytorch_model_00017-of-00063.bin", + "model.layers.15.post_attention_layernorm.weight": "pytorch_model_00017-of-00063.bin", + "model.layers.15.self_attn.k_proj.weight": "pytorch_model_00017-of-00063.bin", + "model.layers.15.self_attn.o_proj.weight": "pytorch_model_00017-of-00063.bin", + "model.layers.15.self_attn.q_proj.weight": "pytorch_model_00017-of-00063.bin", + "model.layers.15.self_attn.v_proj.weight": "pytorch_model_00017-of-00063.bin", + "model.layers.16.input_layernorm.weight": "pytorch_model_00018-of-00063.bin", + "model.layers.16.mlp.down_proj.weight": "pytorch_model_00018-of-00063.bin", + "model.layers.16.mlp.gate_proj.weight": "pytorch_model_00018-of-00063.bin", + "model.layers.16.mlp.up_proj.weight": "pytorch_model_00018-of-00063.bin", + "model.layers.16.post_attention_layernorm.weight": "pytorch_model_00018-of-00063.bin", + "model.layers.16.self_attn.k_proj.weight": "pytorch_model_00018-of-00063.bin", + "model.layers.16.self_attn.o_proj.weight": "pytorch_model_00018-of-00063.bin", + "model.layers.16.self_attn.q_proj.weight": 
"pytorch_model_00018-of-00063.bin", + "model.layers.16.self_attn.v_proj.weight": "pytorch_model_00018-of-00063.bin", + "model.layers.17.input_layernorm.weight": "pytorch_model_00019-of-00063.bin", + "model.layers.17.mlp.down_proj.weight": "pytorch_model_00019-of-00063.bin", + "model.layers.17.mlp.gate_proj.weight": "pytorch_model_00019-of-00063.bin", + "model.layers.17.mlp.up_proj.weight": "pytorch_model_00019-of-00063.bin", + "model.layers.17.post_attention_layernorm.weight": "pytorch_model_00019-of-00063.bin", + "model.layers.17.self_attn.k_proj.weight": "pytorch_model_00019-of-00063.bin", + "model.layers.17.self_attn.o_proj.weight": "pytorch_model_00019-of-00063.bin", + "model.layers.17.self_attn.q_proj.weight": "pytorch_model_00019-of-00063.bin", + "model.layers.17.self_attn.v_proj.weight": "pytorch_model_00019-of-00063.bin", + "model.layers.18.input_layernorm.weight": "pytorch_model_00020-of-00063.bin", + "model.layers.18.mlp.down_proj.weight": "pytorch_model_00020-of-00063.bin", + "model.layers.18.mlp.gate_proj.weight": "pytorch_model_00020-of-00063.bin", + "model.layers.18.mlp.up_proj.weight": "pytorch_model_00020-of-00063.bin", + "model.layers.18.post_attention_layernorm.weight": "pytorch_model_00020-of-00063.bin", + "model.layers.18.self_attn.k_proj.weight": "pytorch_model_00020-of-00063.bin", + "model.layers.18.self_attn.o_proj.weight": "pytorch_model_00020-of-00063.bin", + "model.layers.18.self_attn.q_proj.weight": "pytorch_model_00020-of-00063.bin", + "model.layers.18.self_attn.v_proj.weight": "pytorch_model_00020-of-00063.bin", + "model.layers.19.input_layernorm.weight": "pytorch_model_00021-of-00063.bin", + "model.layers.19.mlp.down_proj.weight": "pytorch_model_00021-of-00063.bin", + "model.layers.19.mlp.gate_proj.weight": "pytorch_model_00021-of-00063.bin", + "model.layers.19.mlp.up_proj.weight": "pytorch_model_00021-of-00063.bin", + "model.layers.19.post_attention_layernorm.weight": "pytorch_model_00021-of-00063.bin", + 
"model.layers.19.self_attn.k_proj.weight": "pytorch_model_00021-of-00063.bin", + "model.layers.19.self_attn.o_proj.weight": "pytorch_model_00021-of-00063.bin", + "model.layers.19.self_attn.q_proj.weight": "pytorch_model_00021-of-00063.bin", + "model.layers.19.self_attn.v_proj.weight": "pytorch_model_00021-of-00063.bin", + "model.layers.2.input_layernorm.weight": "pytorch_model_00004-of-00063.bin", + "model.layers.2.mlp.down_proj.weight": "pytorch_model_00004-of-00063.bin", + "model.layers.2.mlp.gate_proj.weight": "pytorch_model_00004-of-00063.bin", + "model.layers.2.mlp.up_proj.weight": "pytorch_model_00004-of-00063.bin", + "model.layers.2.post_attention_layernorm.weight": "pytorch_model_00004-of-00063.bin", + "model.layers.2.self_attn.k_proj.weight": "pytorch_model_00004-of-00063.bin", + "model.layers.2.self_attn.o_proj.weight": "pytorch_model_00004-of-00063.bin", + "model.layers.2.self_attn.q_proj.weight": "pytorch_model_00004-of-00063.bin", + "model.layers.2.self_attn.v_proj.weight": "pytorch_model_00004-of-00063.bin", + "model.layers.20.input_layernorm.weight": "pytorch_model_00022-of-00063.bin", + "model.layers.20.mlp.down_proj.weight": "pytorch_model_00022-of-00063.bin", + "model.layers.20.mlp.gate_proj.weight": "pytorch_model_00022-of-00063.bin", + "model.layers.20.mlp.up_proj.weight": "pytorch_model_00022-of-00063.bin", + "model.layers.20.post_attention_layernorm.weight": "pytorch_model_00022-of-00063.bin", + "model.layers.20.self_attn.k_proj.weight": "pytorch_model_00022-of-00063.bin", + "model.layers.20.self_attn.o_proj.weight": "pytorch_model_00022-of-00063.bin", + "model.layers.20.self_attn.q_proj.weight": "pytorch_model_00022-of-00063.bin", + "model.layers.20.self_attn.v_proj.weight": "pytorch_model_00022-of-00063.bin", + "model.layers.21.input_layernorm.weight": "pytorch_model_00023-of-00063.bin", + "model.layers.21.mlp.down_proj.weight": "pytorch_model_00023-of-00063.bin", + "model.layers.21.mlp.gate_proj.weight": "pytorch_model_00023-of-00063.bin", 
+ "model.layers.21.mlp.up_proj.weight": "pytorch_model_00023-of-00063.bin", + "model.layers.21.post_attention_layernorm.weight": "pytorch_model_00023-of-00063.bin", + "model.layers.21.self_attn.k_proj.weight": "pytorch_model_00023-of-00063.bin", + "model.layers.21.self_attn.o_proj.weight": "pytorch_model_00023-of-00063.bin", + "model.layers.21.self_attn.q_proj.weight": "pytorch_model_00023-of-00063.bin", + "model.layers.21.self_attn.v_proj.weight": "pytorch_model_00023-of-00063.bin", + "model.layers.22.input_layernorm.weight": "pytorch_model_00024-of-00063.bin", + "model.layers.22.mlp.down_proj.weight": "pytorch_model_00024-of-00063.bin", + "model.layers.22.mlp.gate_proj.weight": "pytorch_model_00024-of-00063.bin", + "model.layers.22.mlp.up_proj.weight": "pytorch_model_00024-of-00063.bin", + "model.layers.22.post_attention_layernorm.weight": "pytorch_model_00024-of-00063.bin", + "model.layers.22.self_attn.k_proj.weight": "pytorch_model_00024-of-00063.bin", + "model.layers.22.self_attn.o_proj.weight": "pytorch_model_00024-of-00063.bin", + "model.layers.22.self_attn.q_proj.weight": "pytorch_model_00024-of-00063.bin", + "model.layers.22.self_attn.v_proj.weight": "pytorch_model_00024-of-00063.bin", + "model.layers.23.input_layernorm.weight": "pytorch_model_00025-of-00063.bin", + "model.layers.23.mlp.down_proj.weight": "pytorch_model_00025-of-00063.bin", + "model.layers.23.mlp.gate_proj.weight": "pytorch_model_00025-of-00063.bin", + "model.layers.23.mlp.up_proj.weight": "pytorch_model_00025-of-00063.bin", + "model.layers.23.post_attention_layernorm.weight": "pytorch_model_00025-of-00063.bin", + "model.layers.23.self_attn.k_proj.weight": "pytorch_model_00025-of-00063.bin", + "model.layers.23.self_attn.o_proj.weight": "pytorch_model_00025-of-00063.bin", + "model.layers.23.self_attn.q_proj.weight": "pytorch_model_00025-of-00063.bin", + "model.layers.23.self_attn.v_proj.weight": "pytorch_model_00025-of-00063.bin", + "model.layers.24.input_layernorm.weight": 
"pytorch_model_00026-of-00063.bin", + "model.layers.24.mlp.down_proj.weight": "pytorch_model_00026-of-00063.bin", + "model.layers.24.mlp.gate_proj.weight": "pytorch_model_00026-of-00063.bin", + "model.layers.24.mlp.up_proj.weight": "pytorch_model_00026-of-00063.bin", + "model.layers.24.post_attention_layernorm.weight": "pytorch_model_00026-of-00063.bin", + "model.layers.24.self_attn.k_proj.weight": "pytorch_model_00026-of-00063.bin", + "model.layers.24.self_attn.o_proj.weight": "pytorch_model_00026-of-00063.bin", + "model.layers.24.self_attn.q_proj.weight": "pytorch_model_00026-of-00063.bin", + "model.layers.24.self_attn.v_proj.weight": "pytorch_model_00026-of-00063.bin", + "model.layers.25.input_layernorm.weight": "pytorch_model_00027-of-00063.bin", + "model.layers.25.mlp.down_proj.weight": "pytorch_model_00027-of-00063.bin", + "model.layers.25.mlp.gate_proj.weight": "pytorch_model_00027-of-00063.bin", + "model.layers.25.mlp.up_proj.weight": "pytorch_model_00027-of-00063.bin", + "model.layers.25.post_attention_layernorm.weight": "pytorch_model_00027-of-00063.bin", + "model.layers.25.self_attn.k_proj.weight": "pytorch_model_00027-of-00063.bin", + "model.layers.25.self_attn.o_proj.weight": "pytorch_model_00027-of-00063.bin", + "model.layers.25.self_attn.q_proj.weight": "pytorch_model_00027-of-00063.bin", + "model.layers.25.self_attn.v_proj.weight": "pytorch_model_00027-of-00063.bin", + "model.layers.26.input_layernorm.weight": "pytorch_model_00028-of-00063.bin", + "model.layers.26.mlp.down_proj.weight": "pytorch_model_00028-of-00063.bin", + "model.layers.26.mlp.gate_proj.weight": "pytorch_model_00028-of-00063.bin", + "model.layers.26.mlp.up_proj.weight": "pytorch_model_00028-of-00063.bin", + "model.layers.26.post_attention_layernorm.weight": "pytorch_model_00028-of-00063.bin", + "model.layers.26.self_attn.k_proj.weight": "pytorch_model_00028-of-00063.bin", + "model.layers.26.self_attn.o_proj.weight": "pytorch_model_00028-of-00063.bin", + 
"model.layers.26.self_attn.q_proj.weight": "pytorch_model_00028-of-00063.bin", + "model.layers.26.self_attn.v_proj.weight": "pytorch_model_00028-of-00063.bin", + "model.layers.27.input_layernorm.weight": "pytorch_model_00029-of-00063.bin", + "model.layers.27.mlp.down_proj.weight": "pytorch_model_00029-of-00063.bin", + "model.layers.27.mlp.gate_proj.weight": "pytorch_model_00029-of-00063.bin", + "model.layers.27.mlp.up_proj.weight": "pytorch_model_00029-of-00063.bin", + "model.layers.27.post_attention_layernorm.weight": "pytorch_model_00029-of-00063.bin", + "model.layers.27.self_attn.k_proj.weight": "pytorch_model_00029-of-00063.bin", + "model.layers.27.self_attn.o_proj.weight": "pytorch_model_00029-of-00063.bin", + "model.layers.27.self_attn.q_proj.weight": "pytorch_model_00029-of-00063.bin", + "model.layers.27.self_attn.v_proj.weight": "pytorch_model_00029-of-00063.bin", + "model.layers.28.input_layernorm.weight": "pytorch_model_00030-of-00063.bin", + "model.layers.28.mlp.down_proj.weight": "pytorch_model_00030-of-00063.bin", + "model.layers.28.mlp.gate_proj.weight": "pytorch_model_00030-of-00063.bin", + "model.layers.28.mlp.up_proj.weight": "pytorch_model_00030-of-00063.bin", + "model.layers.28.post_attention_layernorm.weight": "pytorch_model_00030-of-00063.bin", + "model.layers.28.self_attn.k_proj.weight": "pytorch_model_00030-of-00063.bin", + "model.layers.28.self_attn.o_proj.weight": "pytorch_model_00030-of-00063.bin", + "model.layers.28.self_attn.q_proj.weight": "pytorch_model_00030-of-00063.bin", + "model.layers.28.self_attn.v_proj.weight": "pytorch_model_00030-of-00063.bin", + "model.layers.29.input_layernorm.weight": "pytorch_model_00031-of-00063.bin", + "model.layers.29.mlp.down_proj.weight": "pytorch_model_00031-of-00063.bin", + "model.layers.29.mlp.gate_proj.weight": "pytorch_model_00031-of-00063.bin", + "model.layers.29.mlp.up_proj.weight": "pytorch_model_00031-of-00063.bin", + "model.layers.29.post_attention_layernorm.weight": 
"pytorch_model_00031-of-00063.bin", + "model.layers.29.self_attn.k_proj.weight": "pytorch_model_00031-of-00063.bin", + "model.layers.29.self_attn.o_proj.weight": "pytorch_model_00031-of-00063.bin", + "model.layers.29.self_attn.q_proj.weight": "pytorch_model_00031-of-00063.bin", + "model.layers.29.self_attn.v_proj.weight": "pytorch_model_00031-of-00063.bin", + "model.layers.3.input_layernorm.weight": "pytorch_model_00005-of-00063.bin", + "model.layers.3.mlp.down_proj.weight": "pytorch_model_00005-of-00063.bin", + "model.layers.3.mlp.gate_proj.weight": "pytorch_model_00005-of-00063.bin", + "model.layers.3.mlp.up_proj.weight": "pytorch_model_00005-of-00063.bin", + "model.layers.3.post_attention_layernorm.weight": "pytorch_model_00005-of-00063.bin", + "model.layers.3.self_attn.k_proj.weight": "pytorch_model_00005-of-00063.bin", + "model.layers.3.self_attn.o_proj.weight": "pytorch_model_00005-of-00063.bin", + "model.layers.3.self_attn.q_proj.weight": "pytorch_model_00005-of-00063.bin", + "model.layers.3.self_attn.v_proj.weight": "pytorch_model_00005-of-00063.bin", + "model.layers.30.input_layernorm.weight": "pytorch_model_00032-of-00063.bin", + "model.layers.30.mlp.down_proj.weight": "pytorch_model_00032-of-00063.bin", + "model.layers.30.mlp.gate_proj.weight": "pytorch_model_00032-of-00063.bin", + "model.layers.30.mlp.up_proj.weight": "pytorch_model_00032-of-00063.bin", + "model.layers.30.post_attention_layernorm.weight": "pytorch_model_00032-of-00063.bin", + "model.layers.30.self_attn.k_proj.weight": "pytorch_model_00032-of-00063.bin", + "model.layers.30.self_attn.o_proj.weight": "pytorch_model_00032-of-00063.bin", + "model.layers.30.self_attn.q_proj.weight": "pytorch_model_00032-of-00063.bin", + "model.layers.30.self_attn.v_proj.weight": "pytorch_model_00032-of-00063.bin", + "model.layers.31.input_layernorm.weight": "pytorch_model_00033-of-00063.bin", + "model.layers.31.mlp.down_proj.weight": "pytorch_model_00033-of-00063.bin", + 
"model.layers.31.mlp.gate_proj.weight": "pytorch_model_00033-of-00063.bin", + "model.layers.31.mlp.up_proj.weight": "pytorch_model_00033-of-00063.bin", + "model.layers.31.post_attention_layernorm.weight": "pytorch_model_00033-of-00063.bin", + "model.layers.31.self_attn.k_proj.weight": "pytorch_model_00033-of-00063.bin", + "model.layers.31.self_attn.o_proj.weight": "pytorch_model_00033-of-00063.bin", + "model.layers.31.self_attn.q_proj.weight": "pytorch_model_00033-of-00063.bin", + "model.layers.31.self_attn.v_proj.weight": "pytorch_model_00033-of-00063.bin", + "model.layers.32.input_layernorm.weight": "pytorch_model_00034-of-00063.bin", + "model.layers.32.mlp.down_proj.weight": "pytorch_model_00034-of-00063.bin", + "model.layers.32.mlp.gate_proj.weight": "pytorch_model_00034-of-00063.bin", + "model.layers.32.mlp.up_proj.weight": "pytorch_model_00034-of-00063.bin", + "model.layers.32.post_attention_layernorm.weight": "pytorch_model_00034-of-00063.bin", + "model.layers.32.self_attn.k_proj.weight": "pytorch_model_00034-of-00063.bin", + "model.layers.32.self_attn.o_proj.weight": "pytorch_model_00034-of-00063.bin", + "model.layers.32.self_attn.q_proj.weight": "pytorch_model_00034-of-00063.bin", + "model.layers.32.self_attn.v_proj.weight": "pytorch_model_00034-of-00063.bin", + "model.layers.33.input_layernorm.weight": "pytorch_model_00035-of-00063.bin", + "model.layers.33.mlp.down_proj.weight": "pytorch_model_00035-of-00063.bin", + "model.layers.33.mlp.gate_proj.weight": "pytorch_model_00035-of-00063.bin", + "model.layers.33.mlp.up_proj.weight": "pytorch_model_00035-of-00063.bin", + "model.layers.33.post_attention_layernorm.weight": "pytorch_model_00035-of-00063.bin", + "model.layers.33.self_attn.k_proj.weight": "pytorch_model_00035-of-00063.bin", + "model.layers.33.self_attn.o_proj.weight": "pytorch_model_00035-of-00063.bin", + "model.layers.33.self_attn.q_proj.weight": "pytorch_model_00035-of-00063.bin", + "model.layers.33.self_attn.v_proj.weight": 
"pytorch_model_00035-of-00063.bin", + "model.layers.34.input_layernorm.weight": "pytorch_model_00036-of-00063.bin", + "model.layers.34.mlp.down_proj.weight": "pytorch_model_00036-of-00063.bin", + "model.layers.34.mlp.gate_proj.weight": "pytorch_model_00036-of-00063.bin", + "model.layers.34.mlp.up_proj.weight": "pytorch_model_00036-of-00063.bin", + "model.layers.34.post_attention_layernorm.weight": "pytorch_model_00036-of-00063.bin", + "model.layers.34.self_attn.k_proj.weight": "pytorch_model_00036-of-00063.bin", + "model.layers.34.self_attn.o_proj.weight": "pytorch_model_00036-of-00063.bin", + "model.layers.34.self_attn.q_proj.weight": "pytorch_model_00036-of-00063.bin", + "model.layers.34.self_attn.v_proj.weight": "pytorch_model_00036-of-00063.bin", + "model.layers.35.input_layernorm.weight": "pytorch_model_00037-of-00063.bin", + "model.layers.35.mlp.down_proj.weight": "pytorch_model_00037-of-00063.bin", + "model.layers.35.mlp.gate_proj.weight": "pytorch_model_00037-of-00063.bin", + "model.layers.35.mlp.up_proj.weight": "pytorch_model_00037-of-00063.bin", + "model.layers.35.post_attention_layernorm.weight": "pytorch_model_00037-of-00063.bin", + "model.layers.35.self_attn.k_proj.weight": "pytorch_model_00037-of-00063.bin", + "model.layers.35.self_attn.o_proj.weight": "pytorch_model_00037-of-00063.bin", + "model.layers.35.self_attn.q_proj.weight": "pytorch_model_00037-of-00063.bin", + "model.layers.35.self_attn.v_proj.weight": "pytorch_model_00037-of-00063.bin", + "model.layers.36.input_layernorm.weight": "pytorch_model_00038-of-00063.bin", + "model.layers.36.mlp.down_proj.weight": "pytorch_model_00038-of-00063.bin", + "model.layers.36.mlp.gate_proj.weight": "pytorch_model_00038-of-00063.bin", + "model.layers.36.mlp.up_proj.weight": "pytorch_model_00038-of-00063.bin", + "model.layers.36.post_attention_layernorm.weight": "pytorch_model_00038-of-00063.bin", + "model.layers.36.self_attn.k_proj.weight": "pytorch_model_00038-of-00063.bin", + 
"model.layers.36.self_attn.o_proj.weight": "pytorch_model_00038-of-00063.bin", + "model.layers.36.self_attn.q_proj.weight": "pytorch_model_00038-of-00063.bin", + "model.layers.36.self_attn.v_proj.weight": "pytorch_model_00038-of-00063.bin", + "model.layers.37.input_layernorm.weight": "pytorch_model_00039-of-00063.bin", + "model.layers.37.mlp.down_proj.weight": "pytorch_model_00039-of-00063.bin", + "model.layers.37.mlp.gate_proj.weight": "pytorch_model_00039-of-00063.bin", + "model.layers.37.mlp.up_proj.weight": "pytorch_model_00039-of-00063.bin", + "model.layers.37.post_attention_layernorm.weight": "pytorch_model_00039-of-00063.bin", + "model.layers.37.self_attn.k_proj.weight": "pytorch_model_00039-of-00063.bin", + "model.layers.37.self_attn.o_proj.weight": "pytorch_model_00039-of-00063.bin", + "model.layers.37.self_attn.q_proj.weight": "pytorch_model_00039-of-00063.bin", + "model.layers.37.self_attn.v_proj.weight": "pytorch_model_00039-of-00063.bin", + "model.layers.38.input_layernorm.weight": "pytorch_model_00040-of-00063.bin", + "model.layers.38.mlp.down_proj.weight": "pytorch_model_00040-of-00063.bin", + "model.layers.38.mlp.gate_proj.weight": "pytorch_model_00040-of-00063.bin", + "model.layers.38.mlp.up_proj.weight": "pytorch_model_00040-of-00063.bin", + "model.layers.38.post_attention_layernorm.weight": "pytorch_model_00040-of-00063.bin", + "model.layers.38.self_attn.k_proj.weight": "pytorch_model_00040-of-00063.bin", + "model.layers.38.self_attn.o_proj.weight": "pytorch_model_00040-of-00063.bin", + "model.layers.38.self_attn.q_proj.weight": "pytorch_model_00040-of-00063.bin", + "model.layers.38.self_attn.v_proj.weight": "pytorch_model_00040-of-00063.bin", + "model.layers.39.input_layernorm.weight": "pytorch_model_00041-of-00063.bin", + "model.layers.39.mlp.down_proj.weight": "pytorch_model_00041-of-00063.bin", + "model.layers.39.mlp.gate_proj.weight": "pytorch_model_00041-of-00063.bin", + "model.layers.39.mlp.up_proj.weight": 
"pytorch_model_00041-of-00063.bin", + "model.layers.39.post_attention_layernorm.weight": "pytorch_model_00041-of-00063.bin", + "model.layers.39.self_attn.k_proj.weight": "pytorch_model_00041-of-00063.bin", + "model.layers.39.self_attn.o_proj.weight": "pytorch_model_00041-of-00063.bin", + "model.layers.39.self_attn.q_proj.weight": "pytorch_model_00041-of-00063.bin", + "model.layers.39.self_attn.v_proj.weight": "pytorch_model_00041-of-00063.bin", + "model.layers.4.input_layernorm.weight": "pytorch_model_00006-of-00063.bin", + "model.layers.4.mlp.down_proj.weight": "pytorch_model_00006-of-00063.bin", + "model.layers.4.mlp.gate_proj.weight": "pytorch_model_00006-of-00063.bin", + "model.layers.4.mlp.up_proj.weight": "pytorch_model_00006-of-00063.bin", + "model.layers.4.post_attention_layernorm.weight": "pytorch_model_00006-of-00063.bin", + "model.layers.4.self_attn.k_proj.weight": "pytorch_model_00006-of-00063.bin", + "model.layers.4.self_attn.o_proj.weight": "pytorch_model_00006-of-00063.bin", + "model.layers.4.self_attn.q_proj.weight": "pytorch_model_00006-of-00063.bin", + "model.layers.4.self_attn.v_proj.weight": "pytorch_model_00006-of-00063.bin", + "model.layers.40.input_layernorm.weight": "pytorch_model_00042-of-00063.bin", + "model.layers.40.mlp.down_proj.weight": "pytorch_model_00042-of-00063.bin", + "model.layers.40.mlp.gate_proj.weight": "pytorch_model_00042-of-00063.bin", + "model.layers.40.mlp.up_proj.weight": "pytorch_model_00042-of-00063.bin", + "model.layers.40.post_attention_layernorm.weight": "pytorch_model_00042-of-00063.bin", + "model.layers.40.self_attn.k_proj.weight": "pytorch_model_00042-of-00063.bin", + "model.layers.40.self_attn.o_proj.weight": "pytorch_model_00042-of-00063.bin", + "model.layers.40.self_attn.q_proj.weight": "pytorch_model_00042-of-00063.bin", + "model.layers.40.self_attn.v_proj.weight": "pytorch_model_00042-of-00063.bin", + "model.layers.41.input_layernorm.weight": "pytorch_model_00043-of-00063.bin", + 
"model.layers.41.mlp.down_proj.weight": "pytorch_model_00043-of-00063.bin", + "model.layers.41.mlp.gate_proj.weight": "pytorch_model_00043-of-00063.bin", + "model.layers.41.mlp.up_proj.weight": "pytorch_model_00043-of-00063.bin", + "model.layers.41.post_attention_layernorm.weight": "pytorch_model_00043-of-00063.bin", + "model.layers.41.self_attn.k_proj.weight": "pytorch_model_00043-of-00063.bin", + "model.layers.41.self_attn.o_proj.weight": "pytorch_model_00043-of-00063.bin", + "model.layers.41.self_attn.q_proj.weight": "pytorch_model_00043-of-00063.bin", + "model.layers.41.self_attn.v_proj.weight": "pytorch_model_00043-of-00063.bin", + "model.layers.42.input_layernorm.weight": "pytorch_model_00044-of-00063.bin", + "model.layers.42.mlp.down_proj.weight": "pytorch_model_00044-of-00063.bin", + "model.layers.42.mlp.gate_proj.weight": "pytorch_model_00044-of-00063.bin", + "model.layers.42.mlp.up_proj.weight": "pytorch_model_00044-of-00063.bin", + "model.layers.42.post_attention_layernorm.weight": "pytorch_model_00044-of-00063.bin", + "model.layers.42.self_attn.k_proj.weight": "pytorch_model_00044-of-00063.bin", + "model.layers.42.self_attn.o_proj.weight": "pytorch_model_00044-of-00063.bin", + "model.layers.42.self_attn.q_proj.weight": "pytorch_model_00044-of-00063.bin", + "model.layers.42.self_attn.v_proj.weight": "pytorch_model_00044-of-00063.bin", + "model.layers.43.input_layernorm.weight": "pytorch_model_00045-of-00063.bin", + "model.layers.43.mlp.down_proj.weight": "pytorch_model_00045-of-00063.bin", + "model.layers.43.mlp.gate_proj.weight": "pytorch_model_00045-of-00063.bin", + "model.layers.43.mlp.up_proj.weight": "pytorch_model_00045-of-00063.bin", + "model.layers.43.post_attention_layernorm.weight": "pytorch_model_00045-of-00063.bin", + "model.layers.43.self_attn.k_proj.weight": "pytorch_model_00045-of-00063.bin", + "model.layers.43.self_attn.o_proj.weight": "pytorch_model_00045-of-00063.bin", + "model.layers.43.self_attn.q_proj.weight": 
"pytorch_model_00045-of-00063.bin", + "model.layers.43.self_attn.v_proj.weight": "pytorch_model_00045-of-00063.bin", + "model.layers.44.input_layernorm.weight": "pytorch_model_00046-of-00063.bin", + "model.layers.44.mlp.down_proj.weight": "pytorch_model_00046-of-00063.bin", + "model.layers.44.mlp.gate_proj.weight": "pytorch_model_00046-of-00063.bin", + "model.layers.44.mlp.up_proj.weight": "pytorch_model_00046-of-00063.bin", + "model.layers.44.post_attention_layernorm.weight": "pytorch_model_00046-of-00063.bin", + "model.layers.44.self_attn.k_proj.weight": "pytorch_model_00046-of-00063.bin", + "model.layers.44.self_attn.o_proj.weight": "pytorch_model_00046-of-00063.bin", + "model.layers.44.self_attn.q_proj.weight": "pytorch_model_00046-of-00063.bin", + "model.layers.44.self_attn.v_proj.weight": "pytorch_model_00046-of-00063.bin", + "model.layers.45.input_layernorm.weight": "pytorch_model_00047-of-00063.bin", + "model.layers.45.mlp.down_proj.weight": "pytorch_model_00047-of-00063.bin", + "model.layers.45.mlp.gate_proj.weight": "pytorch_model_00047-of-00063.bin", + "model.layers.45.mlp.up_proj.weight": "pytorch_model_00047-of-00063.bin", + "model.layers.45.post_attention_layernorm.weight": "pytorch_model_00047-of-00063.bin", + "model.layers.45.self_attn.k_proj.weight": "pytorch_model_00047-of-00063.bin", + "model.layers.45.self_attn.o_proj.weight": "pytorch_model_00047-of-00063.bin", + "model.layers.45.self_attn.q_proj.weight": "pytorch_model_00047-of-00063.bin", + "model.layers.45.self_attn.v_proj.weight": "pytorch_model_00047-of-00063.bin", + "model.layers.46.input_layernorm.weight": "pytorch_model_00048-of-00063.bin", + "model.layers.46.mlp.down_proj.weight": "pytorch_model_00048-of-00063.bin", + "model.layers.46.mlp.gate_proj.weight": "pytorch_model_00048-of-00063.bin", + "model.layers.46.mlp.up_proj.weight": "pytorch_model_00048-of-00063.bin", + "model.layers.46.post_attention_layernorm.weight": "pytorch_model_00048-of-00063.bin", + 
"model.layers.46.self_attn.k_proj.weight": "pytorch_model_00048-of-00063.bin", + "model.layers.46.self_attn.o_proj.weight": "pytorch_model_00048-of-00063.bin", + "model.layers.46.self_attn.q_proj.weight": "pytorch_model_00048-of-00063.bin", + "model.layers.46.self_attn.v_proj.weight": "pytorch_model_00048-of-00063.bin", + "model.layers.47.input_layernorm.weight": "pytorch_model_00049-of-00063.bin", + "model.layers.47.mlp.down_proj.weight": "pytorch_model_00049-of-00063.bin", + "model.layers.47.mlp.gate_proj.weight": "pytorch_model_00049-of-00063.bin", + "model.layers.47.mlp.up_proj.weight": "pytorch_model_00049-of-00063.bin", + "model.layers.47.post_attention_layernorm.weight": "pytorch_model_00049-of-00063.bin", + "model.layers.47.self_attn.k_proj.weight": "pytorch_model_00049-of-00063.bin", + "model.layers.47.self_attn.o_proj.weight": "pytorch_model_00049-of-00063.bin", + "model.layers.47.self_attn.q_proj.weight": "pytorch_model_00049-of-00063.bin", + "model.layers.47.self_attn.v_proj.weight": "pytorch_model_00049-of-00063.bin", + "model.layers.48.input_layernorm.weight": "pytorch_model_00050-of-00063.bin", + "model.layers.48.mlp.down_proj.weight": "pytorch_model_00050-of-00063.bin", + "model.layers.48.mlp.gate_proj.weight": "pytorch_model_00050-of-00063.bin", + "model.layers.48.mlp.up_proj.weight": "pytorch_model_00050-of-00063.bin", + "model.layers.48.post_attention_layernorm.weight": "pytorch_model_00050-of-00063.bin", + "model.layers.48.self_attn.k_proj.weight": "pytorch_model_00050-of-00063.bin", + "model.layers.48.self_attn.o_proj.weight": "pytorch_model_00050-of-00063.bin", + "model.layers.48.self_attn.q_proj.weight": "pytorch_model_00050-of-00063.bin", + "model.layers.48.self_attn.v_proj.weight": "pytorch_model_00050-of-00063.bin", + "model.layers.49.input_layernorm.weight": "pytorch_model_00051-of-00063.bin", + "model.layers.49.mlp.down_proj.weight": "pytorch_model_00051-of-00063.bin", + "model.layers.49.mlp.gate_proj.weight": 
"pytorch_model_00051-of-00063.bin", + "model.layers.49.mlp.up_proj.weight": "pytorch_model_00051-of-00063.bin", + "model.layers.49.post_attention_layernorm.weight": "pytorch_model_00051-of-00063.bin", + "model.layers.49.self_attn.k_proj.weight": "pytorch_model_00051-of-00063.bin", + "model.layers.49.self_attn.o_proj.weight": "pytorch_model_00051-of-00063.bin", + "model.layers.49.self_attn.q_proj.weight": "pytorch_model_00051-of-00063.bin", + "model.layers.49.self_attn.v_proj.weight": "pytorch_model_00051-of-00063.bin", + "model.layers.5.input_layernorm.weight": "pytorch_model_00007-of-00063.bin", + "model.layers.5.mlp.down_proj.weight": "pytorch_model_00007-of-00063.bin", + "model.layers.5.mlp.gate_proj.weight": "pytorch_model_00007-of-00063.bin", + "model.layers.5.mlp.up_proj.weight": "pytorch_model_00007-of-00063.bin", + "model.layers.5.post_attention_layernorm.weight": "pytorch_model_00007-of-00063.bin", + "model.layers.5.self_attn.k_proj.weight": "pytorch_model_00007-of-00063.bin", + "model.layers.5.self_attn.o_proj.weight": "pytorch_model_00007-of-00063.bin", + "model.layers.5.self_attn.q_proj.weight": "pytorch_model_00007-of-00063.bin", + "model.layers.5.self_attn.v_proj.weight": "pytorch_model_00007-of-00063.bin", + "model.layers.50.input_layernorm.weight": "pytorch_model_00052-of-00063.bin", + "model.layers.50.mlp.down_proj.weight": "pytorch_model_00052-of-00063.bin", + "model.layers.50.mlp.gate_proj.weight": "pytorch_model_00052-of-00063.bin", + "model.layers.50.mlp.up_proj.weight": "pytorch_model_00052-of-00063.bin", + "model.layers.50.post_attention_layernorm.weight": "pytorch_model_00052-of-00063.bin", + "model.layers.50.self_attn.k_proj.weight": "pytorch_model_00052-of-00063.bin", + "model.layers.50.self_attn.o_proj.weight": "pytorch_model_00052-of-00063.bin", + "model.layers.50.self_attn.q_proj.weight": "pytorch_model_00052-of-00063.bin", + "model.layers.50.self_attn.v_proj.weight": "pytorch_model_00052-of-00063.bin", + 
"model.layers.51.input_layernorm.weight": "pytorch_model_00053-of-00063.bin", + "model.layers.51.mlp.down_proj.weight": "pytorch_model_00053-of-00063.bin", + "model.layers.51.mlp.gate_proj.weight": "pytorch_model_00053-of-00063.bin", + "model.layers.51.mlp.up_proj.weight": "pytorch_model_00053-of-00063.bin", + "model.layers.51.post_attention_layernorm.weight": "pytorch_model_00053-of-00063.bin", + "model.layers.51.self_attn.k_proj.weight": "pytorch_model_00053-of-00063.bin", + "model.layers.51.self_attn.o_proj.weight": "pytorch_model_00053-of-00063.bin", + "model.layers.51.self_attn.q_proj.weight": "pytorch_model_00053-of-00063.bin", + "model.layers.51.self_attn.v_proj.weight": "pytorch_model_00053-of-00063.bin", + "model.layers.52.input_layernorm.weight": "pytorch_model_00054-of-00063.bin", + "model.layers.52.mlp.down_proj.weight": "pytorch_model_00054-of-00063.bin", + "model.layers.52.mlp.gate_proj.weight": "pytorch_model_00054-of-00063.bin", + "model.layers.52.mlp.up_proj.weight": "pytorch_model_00054-of-00063.bin", + "model.layers.52.post_attention_layernorm.weight": "pytorch_model_00054-of-00063.bin", + "model.layers.52.self_attn.k_proj.weight": "pytorch_model_00054-of-00063.bin", + "model.layers.52.self_attn.o_proj.weight": "pytorch_model_00054-of-00063.bin", + "model.layers.52.self_attn.q_proj.weight": "pytorch_model_00054-of-00063.bin", + "model.layers.52.self_attn.v_proj.weight": "pytorch_model_00054-of-00063.bin", + "model.layers.53.input_layernorm.weight": "pytorch_model_00055-of-00063.bin", + "model.layers.53.mlp.down_proj.weight": "pytorch_model_00055-of-00063.bin", + "model.layers.53.mlp.gate_proj.weight": "pytorch_model_00055-of-00063.bin", + "model.layers.53.mlp.up_proj.weight": "pytorch_model_00055-of-00063.bin", + "model.layers.53.post_attention_layernorm.weight": "pytorch_model_00055-of-00063.bin", + "model.layers.53.self_attn.k_proj.weight": "pytorch_model_00055-of-00063.bin", + "model.layers.53.self_attn.o_proj.weight": 
"pytorch_model_00055-of-00063.bin", + "model.layers.53.self_attn.q_proj.weight": "pytorch_model_00055-of-00063.bin", + "model.layers.53.self_attn.v_proj.weight": "pytorch_model_00055-of-00063.bin", + "model.layers.54.input_layernorm.weight": "pytorch_model_00056-of-00063.bin", + "model.layers.54.mlp.down_proj.weight": "pytorch_model_00056-of-00063.bin", + "model.layers.54.mlp.gate_proj.weight": "pytorch_model_00056-of-00063.bin", + "model.layers.54.mlp.up_proj.weight": "pytorch_model_00056-of-00063.bin", + "model.layers.54.post_attention_layernorm.weight": "pytorch_model_00056-of-00063.bin", + "model.layers.54.self_attn.k_proj.weight": "pytorch_model_00056-of-00063.bin", + "model.layers.54.self_attn.o_proj.weight": "pytorch_model_00056-of-00063.bin", + "model.layers.54.self_attn.q_proj.weight": "pytorch_model_00056-of-00063.bin", + "model.layers.54.self_attn.v_proj.weight": "pytorch_model_00056-of-00063.bin", + "model.layers.55.input_layernorm.weight": "pytorch_model_00057-of-00063.bin", + "model.layers.55.mlp.down_proj.weight": "pytorch_model_00057-of-00063.bin", + "model.layers.55.mlp.gate_proj.weight": "pytorch_model_00057-of-00063.bin", + "model.layers.55.mlp.up_proj.weight": "pytorch_model_00057-of-00063.bin", + "model.layers.55.post_attention_layernorm.weight": "pytorch_model_00057-of-00063.bin", + "model.layers.55.self_attn.k_proj.weight": "pytorch_model_00057-of-00063.bin", + "model.layers.55.self_attn.o_proj.weight": "pytorch_model_00057-of-00063.bin", + "model.layers.55.self_attn.q_proj.weight": "pytorch_model_00057-of-00063.bin", + "model.layers.55.self_attn.v_proj.weight": "pytorch_model_00057-of-00063.bin", + "model.layers.56.input_layernorm.weight": "pytorch_model_00058-of-00063.bin", + "model.layers.56.mlp.down_proj.weight": "pytorch_model_00058-of-00063.bin", + "model.layers.56.mlp.gate_proj.weight": "pytorch_model_00058-of-00063.bin", + "model.layers.56.mlp.up_proj.weight": "pytorch_model_00058-of-00063.bin", + 
"model.layers.56.post_attention_layernorm.weight": "pytorch_model_00058-of-00063.bin", + "model.layers.56.self_attn.k_proj.weight": "pytorch_model_00058-of-00063.bin", + "model.layers.56.self_attn.o_proj.weight": "pytorch_model_00058-of-00063.bin", + "model.layers.56.self_attn.q_proj.weight": "pytorch_model_00058-of-00063.bin", + "model.layers.56.self_attn.v_proj.weight": "pytorch_model_00058-of-00063.bin", + "model.layers.57.input_layernorm.weight": "pytorch_model_00059-of-00063.bin", + "model.layers.57.mlp.down_proj.weight": "pytorch_model_00059-of-00063.bin", + "model.layers.57.mlp.gate_proj.weight": "pytorch_model_00059-of-00063.bin", + "model.layers.57.mlp.up_proj.weight": "pytorch_model_00059-of-00063.bin", + "model.layers.57.post_attention_layernorm.weight": "pytorch_model_00059-of-00063.bin", + "model.layers.57.self_attn.k_proj.weight": "pytorch_model_00059-of-00063.bin", + "model.layers.57.self_attn.o_proj.weight": "pytorch_model_00059-of-00063.bin", + "model.layers.57.self_attn.q_proj.weight": "pytorch_model_00059-of-00063.bin", + "model.layers.57.self_attn.v_proj.weight": "pytorch_model_00059-of-00063.bin", + "model.layers.58.input_layernorm.weight": "pytorch_model_00060-of-00063.bin", + "model.layers.58.mlp.down_proj.weight": "pytorch_model_00060-of-00063.bin", + "model.layers.58.mlp.gate_proj.weight": "pytorch_model_00060-of-00063.bin", + "model.layers.58.mlp.up_proj.weight": "pytorch_model_00060-of-00063.bin", + "model.layers.58.post_attention_layernorm.weight": "pytorch_model_00060-of-00063.bin", + "model.layers.58.self_attn.k_proj.weight": "pytorch_model_00060-of-00063.bin", + "model.layers.58.self_attn.o_proj.weight": "pytorch_model_00060-of-00063.bin", + "model.layers.58.self_attn.q_proj.weight": "pytorch_model_00060-of-00063.bin", + "model.layers.58.self_attn.v_proj.weight": "pytorch_model_00060-of-00063.bin", + "model.layers.59.input_layernorm.weight": "pytorch_model_00061-of-00063.bin", + "model.layers.59.mlp.down_proj.weight": 
"pytorch_model_00061-of-00063.bin", + "model.layers.59.mlp.gate_proj.weight": "pytorch_model_00061-of-00063.bin", + "model.layers.59.mlp.up_proj.weight": "pytorch_model_00061-of-00063.bin", + "model.layers.59.post_attention_layernorm.weight": "pytorch_model_00061-of-00063.bin", + "model.layers.59.self_attn.k_proj.weight": "pytorch_model_00061-of-00063.bin", + "model.layers.59.self_attn.o_proj.weight": "pytorch_model_00061-of-00063.bin", + "model.layers.59.self_attn.q_proj.weight": "pytorch_model_00061-of-00063.bin", + "model.layers.59.self_attn.v_proj.weight": "pytorch_model_00061-of-00063.bin", + "model.layers.6.input_layernorm.weight": "pytorch_model_00008-of-00063.bin", + "model.layers.6.mlp.down_proj.weight": "pytorch_model_00008-of-00063.bin", + "model.layers.6.mlp.gate_proj.weight": "pytorch_model_00008-of-00063.bin", + "model.layers.6.mlp.up_proj.weight": "pytorch_model_00008-of-00063.bin", + "model.layers.6.post_attention_layernorm.weight": "pytorch_model_00008-of-00063.bin", + "model.layers.6.self_attn.k_proj.weight": "pytorch_model_00008-of-00063.bin", + "model.layers.6.self_attn.o_proj.weight": "pytorch_model_00008-of-00063.bin", + "model.layers.6.self_attn.q_proj.weight": "pytorch_model_00008-of-00063.bin", + "model.layers.6.self_attn.v_proj.weight": "pytorch_model_00008-of-00063.bin", + "model.layers.7.input_layernorm.weight": "pytorch_model_00009-of-00063.bin", + "model.layers.7.mlp.down_proj.weight": "pytorch_model_00009-of-00063.bin", + "model.layers.7.mlp.gate_proj.weight": "pytorch_model_00009-of-00063.bin", + "model.layers.7.mlp.up_proj.weight": "pytorch_model_00009-of-00063.bin", + "model.layers.7.post_attention_layernorm.weight": "pytorch_model_00009-of-00063.bin", + "model.layers.7.self_attn.k_proj.weight": "pytorch_model_00009-of-00063.bin", + "model.layers.7.self_attn.o_proj.weight": "pytorch_model_00009-of-00063.bin", + "model.layers.7.self_attn.q_proj.weight": "pytorch_model_00009-of-00063.bin", + "model.layers.7.self_attn.v_proj.weight": 
"pytorch_model_00009-of-00063.bin", + "model.layers.8.input_layernorm.weight": "pytorch_model_00010-of-00063.bin", + "model.layers.8.mlp.down_proj.weight": "pytorch_model_00010-of-00063.bin", + "model.layers.8.mlp.gate_proj.weight": "pytorch_model_00010-of-00063.bin", + "model.layers.8.mlp.up_proj.weight": "pytorch_model_00010-of-00063.bin", + "model.layers.8.post_attention_layernorm.weight": "pytorch_model_00010-of-00063.bin", + "model.layers.8.self_attn.k_proj.weight": "pytorch_model_00010-of-00063.bin", + "model.layers.8.self_attn.o_proj.weight": "pytorch_model_00010-of-00063.bin", + "model.layers.8.self_attn.q_proj.weight": "pytorch_model_00010-of-00063.bin", + "model.layers.8.self_attn.v_proj.weight": "pytorch_model_00010-of-00063.bin", + "model.layers.9.input_layernorm.weight": "pytorch_model_00011-of-00063.bin", + "model.layers.9.mlp.down_proj.weight": "pytorch_model_00011-of-00063.bin", + "model.layers.9.mlp.gate_proj.weight": "pytorch_model_00011-of-00063.bin", + "model.layers.9.mlp.up_proj.weight": "pytorch_model_00011-of-00063.bin", + "model.layers.9.post_attention_layernorm.weight": "pytorch_model_00011-of-00063.bin", + "model.layers.9.self_attn.k_proj.weight": "pytorch_model_00011-of-00063.bin", + "model.layers.9.self_attn.o_proj.weight": "pytorch_model_00011-of-00063.bin", + "model.layers.9.self_attn.q_proj.weight": "pytorch_model_00011-of-00063.bin", + "model.layers.9.self_attn.v_proj.weight": "pytorch_model_00011-of-00063.bin", + "model.norm.weight": "pytorch_model_00062-of-00063.bin" + } +} diff --git a/pytorch_model_00001-of-00063.bin b/pytorch_model_00001-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..2a26e12f69e8b164dfc8e7ba4e2f639778dcdf51 --- /dev/null +++ b/pytorch_model_00001-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:39f9dead6e3f538b2d63d1ea23ecf9a8bd8e09040dab85f26597e33261db239a +size 1059062698 diff --git a/pytorch_model_00002-of-00063.bin 
b/pytorch_model_00002-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..58ff18b998699cb5732d2ef224b90183f674fe56 --- /dev/null +++ b/pytorch_model_00002-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:852ed639606b5c93c6bbc09f630100fae17aff694dd57eefe954c44271869cf9 +size 634412402 diff --git a/pytorch_model_00003-of-00063.bin b/pytorch_model_00003-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..5a909470456ab2dd509d601439ad5d6fa913eda1 --- /dev/null +++ b/pytorch_model_00003-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6d52f935ec2443c708b57f89361bba899d52276fcb15fe9f37e31b68984cc838 +size 634412402 diff --git a/pytorch_model_00004-of-00063.bin b/pytorch_model_00004-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..7cbffe32fa167e2617ac4e464864bdd19aacf5ce --- /dev/null +++ b/pytorch_model_00004-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ce35c39f4311a56f766df7ff25f82335b968eeaf9c55ba8194b5f0f20ff40bd2 +size 634412402 diff --git a/pytorch_model_00005-of-00063.bin b/pytorch_model_00005-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..42cd0cd1eb7d4412b692c81ef97efcb3aecdb258 --- /dev/null +++ b/pytorch_model_00005-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:401d67eff9d947ec98b0fadcc9841053ca2c58349bbf58bb7f002b74c04469f8 +size 634412402 diff --git a/pytorch_model_00006-of-00063.bin b/pytorch_model_00006-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..3878f24010e664b4861514a4a3b108f646ad644a --- /dev/null +++ b/pytorch_model_00006-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bdcf87ff676f335d1829ce7501223307d0e763b04d5eeb57b0f234676d9ecedd +size 634412402 diff --git a/pytorch_model_00007-of-00063.bin 
b/pytorch_model_00007-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..f9ccdb82070b494b1e8d89da73c500851849f4b9 --- /dev/null +++ b/pytorch_model_00007-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2d7bab964ff5f334ce5d5f7d0ef19c4887c59c26e42906cf32d74939b9a9f210 +size 634412402 diff --git a/pytorch_model_00008-of-00063.bin b/pytorch_model_00008-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..37bca740ca9c1b5c76c07a55335ad038594c6714 --- /dev/null +++ b/pytorch_model_00008-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c3fcaea87033c42338921c15631cae52a0a04a349a824b57d4a5e078b8c1a8fb +size 634412402 diff --git a/pytorch_model_00009-of-00063.bin b/pytorch_model_00009-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..626848b152b30b00d180fd0db31e691a3cfbd199 --- /dev/null +++ b/pytorch_model_00009-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a98ed446d25300b79af845bbaa4d0e0845b02a7bd66aebb12b2c1c44a599e901 +size 634412402 diff --git a/pytorch_model_00010-of-00063.bin b/pytorch_model_00010-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..44f885de1a14c15fb5a20ba3394d7c7e51554658 --- /dev/null +++ b/pytorch_model_00010-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:828ee51e1e2110432fdda3b24ecb412d3fd7f5107177ecbe78f87462d89af423 +size 634412402 diff --git a/pytorch_model_00011-of-00063.bin b/pytorch_model_00011-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..80e2a397be7d0b977d42b1da1299289148f4eb16 --- /dev/null +++ b/pytorch_model_00011-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:48cf2884fd04a4d87c3c53bbcfa73115c8d0b14c068d6b377ad3c4b9634d6c93 +size 634412402 diff --git a/pytorch_model_00012-of-00063.bin 
b/pytorch_model_00012-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..8f299ca954140006a14252d632f42c11bbe8431b --- /dev/null +++ b/pytorch_model_00012-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:972253da4ce9b15dae450f5d84811856e33f11b3cd813d6ad0f0a244d706c447 +size 634412402 diff --git a/pytorch_model_00013-of-00063.bin b/pytorch_model_00013-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..2afe23abbb0234f3987b811dcc869d774067cb73 --- /dev/null +++ b/pytorch_model_00013-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d98fff8448905f26ee00964dae254956cb30d39048da31292bdee5bdc32ffb5b +size 634412402 diff --git a/pytorch_model_00014-of-00063.bin b/pytorch_model_00014-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..8c97dce83f0b13e0ba9da76047b6cfac75c17077 --- /dev/null +++ b/pytorch_model_00014-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d89eed551bd73730e8b6be77f7068be4904ef91518b30713232dc9e7215f5127 +size 634412402 diff --git a/pytorch_model_00015-of-00063.bin b/pytorch_model_00015-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..3af5c3c119859ef617d0814c29e735839ab5a503 --- /dev/null +++ b/pytorch_model_00015-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6f6ec34171ee45e853f7fc59aa934d663bfaa1b1d58eb168f869d0603c0ecf34 +size 634412402 diff --git a/pytorch_model_00016-of-00063.bin b/pytorch_model_00016-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..96398e43b1bfbada67091fafd29cfe5ea86628b9 --- /dev/null +++ b/pytorch_model_00016-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d75a0a3b0bdf69cab705c77146a5cfb677f06ebea11e0bcad0178de917ec3e70 +size 634412402 diff --git a/pytorch_model_00017-of-00063.bin 
b/pytorch_model_00017-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..536ec70c13855208ac6b7a807d306d21b81cca90 --- /dev/null +++ b/pytorch_model_00017-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6465d7e8bd797a2a9c3ef25577e555d52bf835536534cadcb9093e539086bfda +size 634412402 diff --git a/pytorch_model_00018-of-00063.bin b/pytorch_model_00018-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..6bc5bf1437dfd1a3f74fe5fb3a1a5d3c5f835b38 --- /dev/null +++ b/pytorch_model_00018-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9585dd9c191b08af8cccecac479743fce2c70dbb786bba4fd207edfca7e424ec +size 634412402 diff --git a/pytorch_model_00019-of-00063.bin b/pytorch_model_00019-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..27344b70ed42bfe7ab7b91fbbc615e175c822d2b --- /dev/null +++ b/pytorch_model_00019-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fef9c04367e52191415c7de6d5366353886facd1ffa32451d5f4c0f575031048 +size 634412402 diff --git a/pytorch_model_00020-of-00063.bin b/pytorch_model_00020-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..786fa2e20d0b4a644aca270cd26c56cc13665fe7 --- /dev/null +++ b/pytorch_model_00020-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c3a95880526513486ad3d19ce90eb0db7244f78bde539d8809bed81367d76fcf +size 634412402 diff --git a/pytorch_model_00021-of-00063.bin b/pytorch_model_00021-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..067a61bd5afce3320707cd2b7fd2fdab0462d5f7 --- /dev/null +++ b/pytorch_model_00021-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d3f348e11aa2ee98b8c99eb5a00fa6d433eaf7cf3569f264657c3cb48bb3378b +size 634412402 diff --git a/pytorch_model_00022-of-00063.bin 
b/pytorch_model_00022-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..f40e48cffc4d56afda27b8881c7fe33e697b145f --- /dev/null +++ b/pytorch_model_00022-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:50d891fdcb387573e12cd77794f84e1ebaa21c3533b853de5f8c755053f10de4 +size 634412402 diff --git a/pytorch_model_00023-of-00063.bin b/pytorch_model_00023-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..e170e9deddd7e3394f8278e86cb3129937229983 --- /dev/null +++ b/pytorch_model_00023-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8528e46b62bda3813575b0dc1db23d2397dab8d9d677410e562eae00f66f3b06 +size 634412402 diff --git a/pytorch_model_00024-of-00063.bin b/pytorch_model_00024-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..ff594032c5a92bff078d89264005607cf75e4f51 --- /dev/null +++ b/pytorch_model_00024-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3cdcebb1fc935a2b24bbb192eafbcaf28ca5a09112e824e179e71276eee393d2 +size 634412402 diff --git a/pytorch_model_00025-of-00063.bin b/pytorch_model_00025-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..5b6448da984d8cf8b65b052cf6306c9c230486a0 --- /dev/null +++ b/pytorch_model_00025-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f0ab1bd69dcd37dbf396755ef7ee6be2ec47447135c71952cb1b30ecc1fd1a7d +size 634412402 diff --git a/pytorch_model_00026-of-00063.bin b/pytorch_model_00026-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..a58028044771aef3957b4a664905f9564bb38e05 --- /dev/null +++ b/pytorch_model_00026-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:329b5562cc148c3294ea3d9149613d3cbca654ed491962196b39e76eca5fa444 +size 634412402 diff --git a/pytorch_model_00027-of-00063.bin 
b/pytorch_model_00027-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..0f3309b09f4f37eaf567d37dab3ec6d42e2af09c --- /dev/null +++ b/pytorch_model_00027-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f20009910845f8dddc124750c3c3ec3ad73fb3a64afebe6e20941bdf2d5bfd5e +size 634412402 diff --git a/pytorch_model_00028-of-00063.bin b/pytorch_model_00028-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..8d0507cb96cf1fb8d4b1ce7c4a4d782326aa892b --- /dev/null +++ b/pytorch_model_00028-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c05b26b969a9dfe5781c78bb7e834103f30472b59de1803d64f6323eb178969a +size 634412402 diff --git a/pytorch_model_00029-of-00063.bin b/pytorch_model_00029-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..5dd80955e58b75d0279769495b10a7cbd6bee291 --- /dev/null +++ b/pytorch_model_00029-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:788bd6369c3aea577bb461a6dbb1aeb7594b1a2a2064b41dbc39759662211168 +size 634412402 diff --git a/pytorch_model_00030-of-00063.bin b/pytorch_model_00030-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..b62445107b7fcce211a2e932def30ad6a1e56770 --- /dev/null +++ b/pytorch_model_00030-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59c7c7731f0bf93315aee15ee9ed3a9dae9738c33ab240884380cc2f725b10c1 +size 634412402 diff --git a/pytorch_model_00031-of-00063.bin b/pytorch_model_00031-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..3f3dc8282efa805522bd165515bc8bcb031e54f5 --- /dev/null +++ b/pytorch_model_00031-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6c7aff2bd7e2bc8245cecba957a1763d8db017bbadb304d56593f0f89dd5b51a +size 634412402 diff --git a/pytorch_model_00032-of-00063.bin 
b/pytorch_model_00032-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..930451718c16abdf2233bbd25e712fa7c7667d36 --- /dev/null +++ b/pytorch_model_00032-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9fa950b1d782cc26d4e2c093df43327c8a25d17408f6481e57c66580914fec9a +size 634412402 diff --git a/pytorch_model_00033-of-00063.bin b/pytorch_model_00033-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..e16711205c944e877aed528b3202fccc9fac65fe --- /dev/null +++ b/pytorch_model_00033-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:51b1d01210876eb46f06a3c6c033361935cb6d4fc26e21a91b14cd0210867ddd +size 634412402 diff --git a/pytorch_model_00034-of-00063.bin b/pytorch_model_00034-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..a0820c2df89b73b01b5f864a30bdf59df71a7277 --- /dev/null +++ b/pytorch_model_00034-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:56324cacba1c9474f346a8d44402daebb15619f48f53fb3bd4c320c62aa4f8a9 +size 634412402 diff --git a/pytorch_model_00035-of-00063.bin b/pytorch_model_00035-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..9211f3b8b445afb7a8656fbcd16276f1a2999890 --- /dev/null +++ b/pytorch_model_00035-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4a43ef7eca243a7423395ae44cd026c433f161a834c98cf5be2c021d1dcc165d +size 634412402 diff --git a/pytorch_model_00036-of-00063.bin b/pytorch_model_00036-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..8ac38cd4d5d5dc357b5899dff547b146369befee --- /dev/null +++ b/pytorch_model_00036-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:52646da5ecc6bca699c3bd7700270e4c657d7e02ddb082d9a93f784f307137d8 +size 634412402 diff --git a/pytorch_model_00037-of-00063.bin 
b/pytorch_model_00037-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..e5241e48c3f2ebded3732a013554a021d89e3a8f --- /dev/null +++ b/pytorch_model_00037-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e1c0d28251fb28ba541d95291c04c7f2e8aa98b78fd57a0d615e43bc605de823 +size 634412402 diff --git a/pytorch_model_00038-of-00063.bin b/pytorch_model_00038-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..a89f3138c91838ca54c1aedfb62d4cded508f419 --- /dev/null +++ b/pytorch_model_00038-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:88efbd7ec2d9bf0c471f4d1fa91afca8ed4188fa3092d68b3de6eee2b60ce875 +size 634412402 diff --git a/pytorch_model_00039-of-00063.bin b/pytorch_model_00039-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..464fb1d54aa8c4b675e7ee735ba87b4bb452a8e2 --- /dev/null +++ b/pytorch_model_00039-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5f9b79cabc4589c456449436e4e9385c12a37180cccf32751fd38835935f09c5 +size 634412402 diff --git a/pytorch_model_00040-of-00063.bin b/pytorch_model_00040-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..1dabddc4b50400d8d78ecbdc976b44d45688c2df --- /dev/null +++ b/pytorch_model_00040-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3e728a0ae5006e554ea79d3798ab3367e6e999d197c63045e9d4124b8b621453 +size 634412402 diff --git a/pytorch_model_00041-of-00063.bin b/pytorch_model_00041-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..74a9c315ff63ba4d2f71ef59ce7d5e8aca8e6d72 --- /dev/null +++ b/pytorch_model_00041-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5535d1bf1b44c4136db6f03f66f02255e1223989fd595bbc7bb27b4f244f2509 +size 634412402 diff --git a/pytorch_model_00042-of-00063.bin 
b/pytorch_model_00042-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..fd0c72644971cad4ddfdcb45e11adfac85222e91 --- /dev/null +++ b/pytorch_model_00042-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7928743d6b938fa73efdddae8054734e78833959d56be71ffb211ad740ecb590 +size 634412402 diff --git a/pytorch_model_00043-of-00063.bin b/pytorch_model_00043-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..ef0384234e5c0472b22da2656136dde2183b93b0 --- /dev/null +++ b/pytorch_model_00043-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2cb501ce112506780440247afd375dd0bcc1d55486dbb6f62de858f675f06473 +size 634412402 diff --git a/pytorch_model_00044-of-00063.bin b/pytorch_model_00044-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..92714a246177abb33c62ac2b45360f9defb86454 --- /dev/null +++ b/pytorch_model_00044-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aaf3f23e3f5d2a745e12c04b89f42e0746d112839808f4ac50157730279f46d2 +size 634412402 diff --git a/pytorch_model_00045-of-00063.bin b/pytorch_model_00045-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..c1d9bbfe0fa42e225f8b46e281e99998dff6f87b --- /dev/null +++ b/pytorch_model_00045-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aa7048e18e8b320d3595176a12ac4a86b56c8871ec3bd4f247cf6dd3b89ff817 +size 634412402 diff --git a/pytorch_model_00046-of-00063.bin b/pytorch_model_00046-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..0289800456bd251ba4736eeb09890ea883a96e7c --- /dev/null +++ b/pytorch_model_00046-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:31d9b4284fac097d681d72f6731b6009ba46cf21cb47dd75510a710c87718ae3 +size 634412402 diff --git a/pytorch_model_00047-of-00063.bin 
b/pytorch_model_00047-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..53ca5aebfa0307551e23f4a841c8c3ed7ef834a4 --- /dev/null +++ b/pytorch_model_00047-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fc99efbec00c241dc4c3486537f269a5e9602b5e1da257c908d9e2247da38acc +size 634412402 diff --git a/pytorch_model_00048-of-00063.bin b/pytorch_model_00048-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..379edbfc2bd16b9911b056a1a8afb11004114c26 --- /dev/null +++ b/pytorch_model_00048-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3614dc52fd510849c283be5e9d9ae0dede5e1c5dc40d4b81a2b1ad2c7e872d03 +size 634412402 diff --git a/pytorch_model_00049-of-00063.bin b/pytorch_model_00049-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..2ab012ed442a3a2eee603fd2fbc3a3055935cf9e --- /dev/null +++ b/pytorch_model_00049-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d78e5c655882552a3611361642a3d5d6f2e20fbc345c538af9aa1bebe2d9dd47 +size 634412402 diff --git a/pytorch_model_00050-of-00063.bin b/pytorch_model_00050-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..328565a5e620dd318b1e895fd28fa363424bcf90 --- /dev/null +++ b/pytorch_model_00050-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0405163d66fe02616a36a5a3ce7edff2fef1e4d6a67cd57d50950587cd7394d2 +size 634412402 diff --git a/pytorch_model_00051-of-00063.bin b/pytorch_model_00051-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..0ee4e723d5f61fc0d3bfec276a867b8548a3b47a --- /dev/null +++ b/pytorch_model_00051-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:62b993ffc3a775f5183c8e2667303ec3c670ed6757b5e30ce9756c48d17a6f91 +size 634412402 diff --git a/pytorch_model_00052-of-00063.bin 
b/pytorch_model_00052-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..305db4ba1acf58091b0c7febac9c0f467b4197d5 --- /dev/null +++ b/pytorch_model_00052-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b651626427ef346ebec03e97b0e3476cd42a7d5eeaedcc9b9c05761f700b0160 +size 634412402 diff --git a/pytorch_model_00053-of-00063.bin b/pytorch_model_00053-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..cfbe06eed22fa9eb17ec4bde643357f762e7694a --- /dev/null +++ b/pytorch_model_00053-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:51474b21457988115dd7876211ad5b31fbfc55b0a263e808d84952ab3def4cf6 +size 634412402 diff --git a/pytorch_model_00054-of-00063.bin b/pytorch_model_00054-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..271269cfeb67b61ecc7ddd418abaa2bbcc4b4b87 --- /dev/null +++ b/pytorch_model_00054-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:96885930fc8478e6c03acf7ee5f673f580e73ff12ce45a1e51540fd528efda07 +size 634412402 diff --git a/pytorch_model_00055-of-00063.bin b/pytorch_model_00055-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..cc61b6aa9f7cf3d504e31e2230aba9c93dafcf87 --- /dev/null +++ b/pytorch_model_00055-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4f150cad27954f99f2f5f7541d50cbe6978dccadbe1556da6e38689a17778363 +size 634412402 diff --git a/pytorch_model_00056-of-00063.bin b/pytorch_model_00056-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..7ef28f174aeb8636face311b29e6bece041d9e07 --- /dev/null +++ b/pytorch_model_00056-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:795262a4eef2af37e02be1da6bda88a8ac71bb9732d06cc99b25834ecd25eb1d +size 634412402 diff --git a/pytorch_model_00057-of-00063.bin 
b/pytorch_model_00057-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..337132a166e676ff23c56d44193a4ffb67f78d2b --- /dev/null +++ b/pytorch_model_00057-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8ddbb1289221d2dd9fc6feb1b55ad52bff3b719419dd0030bb2a174cb0e17c66 +size 634412402 diff --git a/pytorch_model_00058-of-00063.bin b/pytorch_model_00058-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..2f4abf9b11ec342bf4cdcd880d4c8ada000a2e47 --- /dev/null +++ b/pytorch_model_00058-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:88bc725b3984fd163623616d785420153dd0142609cf243f03718ec17466e2a8 +size 634412402 diff --git a/pytorch_model_00059-of-00063.bin b/pytorch_model_00059-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..08d75623b53008c388b362a4d0522a3db155ec80 --- /dev/null +++ b/pytorch_model_00059-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5853210665eac138078fd7e337f3709ed67a7c840ffc70016a524856cb45bee2 +size 634412402 diff --git a/pytorch_model_00060-of-00063.bin b/pytorch_model_00060-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..bd2a53bd7ea2dcd7b7d35ea68f096652ad9dc1d5 --- /dev/null +++ b/pytorch_model_00060-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a210c8093cfb9b4fef1ad71501a2f56a0891c00bb4b191f9a5455f5a2fdd3a7c +size 634412402 diff --git a/pytorch_model_00061-of-00063.bin b/pytorch_model_00061-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..9a1892ec54d18e9fc75630993011f11983de0ec8 --- /dev/null +++ b/pytorch_model_00061-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:70b9adc605615d0f1f2e49da0ea67bf06076c820be52e9cf2e583bf0e418e134 +size 634412402 diff --git a/pytorch_model_00062-of-00063.bin 
b/pytorch_model_00062-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..af66bd5823eb239dfa3d3fb044f8b4680d4bab6c --- /dev/null +++ b/pytorch_model_00062-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:47faf3e8a03dd92497f27fa4e993c9360ce69f76ef7c207a7ebe5e74cd38a62e +size 11178 diff --git a/pytorch_model_00063-of-00063.bin b/pytorch_model_00063-of-00063.bin new file mode 100644 index 0000000000000000000000000000000000000000..bd4d0656975c58f99803e0347a4480eb3f113d64 --- /dev/null +++ b/pytorch_model_00063-of-00063.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cf0da8ff3a0725010fc1d220e7b746df3a97c09947fd12241d1c9e831425b9c7 +size 1059062698 diff --git a/special_tokens_map.json b/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..27ffdad9fe3f7d432eeacfdf51cb3259fe2b62a4 --- /dev/null +++ b/special_tokens_map.json @@ -0,0 +1,10 @@ +{ + "additional_special_tokens": [ + "<|modelname|>", + "<|modelorg|>" + ], + "bos_token": "", + "eos_token": "", + "pad_token": "", + "unk_token": "" +} diff --git a/tokenization_internlm.py b/tokenization_internlm.py new file mode 100644 index 0000000000000000000000000000000000000000..5ce1e66c330b09341a6465bc5514a12c92b5f478 --- /dev/null +++ b/tokenization_internlm.py @@ -0,0 +1,242 @@ +# coding=utf-8 +# Copyright 2022 EleutherAI and the HuggingFace Inc. team. All rights reserved. +# +# This code is based on EleutherAI's GPT-NeoX library and the GPT-NeoX +# and OPT implementations in this library. It has been modified from its +# original forms to accommodate minor architectural differences compared +# to GPT-NeoX and OPT used by the Meta AI team that trained the model. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tokenization classes for InternLM."""
import os
from shutil import copyfile
from typing import Any, Dict, List, Optional, Tuple

import sentencepiece as spm

from transformers.tokenization_utils import PreTrainedTokenizer
from transformers.utils import logging


logger = logging.get_logger(__name__)

# NOTE(review): the leading "./" survives os.path.join (yields "dir/./tokenizer.model");
# harmless but unconventional — most HF tokenizers use just "tokenizer.model".
VOCAB_FILES_NAMES = {"vocab_file": "./tokenizer.model"}

PRETRAINED_VOCAB_FILES_MAP = {}


class InternLMTokenizer(PreTrainedTokenizer):
    """
    Construct an InternLM tokenizer. Based on byte-level Byte-Pair-Encoding
    backed by a SentencePiece model.

    Args:
        vocab_file (`str`):
            Path to the vocabulary file (a serialized SentencePiece model).
    """

    vocab_files_names = VOCAB_FILES_NAMES
    pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
    model_input_names = ["input_ids", "attention_mask"]
    # Registers this class for AutoTokenizer via trust_remote_code loading.
    _auto_class = "AutoTokenizer"

    def __init__(
        self,
        vocab_file: str,
        # NOTE(review): the special-token defaults below are empty strings; they
        # look like "<unk>"/"<s>"/"</s>" markup lost in transcription — confirm
        # against special_tokens_map.json before relying on these defaults.
        unk_token="",
        bos_token="",
        eos_token="",
        pad_token="",
        sp_model_kwargs: Optional[Dict[str, Any]] = None,
        add_bos_token: bool = True,
        add_eos_token: bool = False,
        decode_with_prefix_space: bool = False,
        clean_up_tokenization_spaces: bool = False,
        **kwargs,
    ):
        self.sp_model_kwargs = {} if sp_model_kwargs is None else sp_model_kwargs
        self.vocab_file = vocab_file
        self.add_bos_token = add_bos_token
        self.add_eos_token = add_eos_token
        # NOTE(review): stored but never read in this file — presumably consumed
        # by callers or dead config; verify before removing.
        self.decode_with_prefix_space = decode_with_prefix_space
        # The SentencePiece model must be loaded before super().__init__ because
        # the base class may query vocab_size / token ids during initialization.
        self.sp_model = spm.SentencePieceProcessor(**self.sp_model_kwargs)
        self.sp_model.Load(vocab_file)
        # Lazily-built cache; see the no_prefix_space_tokens property.
        self._no_prefix_space_tokens = None
        super().__init__(
            bos_token=bos_token,
            eos_token=eos_token,
            unk_token=unk_token,
            pad_token=pad_token,
            clean_up_tokenization_spaces=clean_up_tokenization_spaces,
            **kwargs,
        )

    """ Initialization"""

    @property
    def no_prefix_space_tokens(self):
        # Lazily builds the set of vocabulary entries that do NOT start with the
        # SentencePiece word-boundary marker "▁" (i.e. continuation pieces).
        # NOTE(review): this set contains integer *ids* (`i`), but
        # _maybe_add_prefix_space tests token *strings* against it, so the
        # membership test can never succeed. Current decode behavior depends on
        # this quirk (a space is always prepended and later stripped by
        # convert_tokens_to_string's `[1:]`) — confirm upstream before "fixing".
        if self._no_prefix_space_tokens is None:
            vocab = self.convert_ids_to_tokens(list(range(self.vocab_size)))
            self._no_prefix_space_tokens = {i for i, tok in enumerate(vocab) if not tok.startswith("▁")}
        return self._no_prefix_space_tokens

    @property
    def vocab_size(self) -> int:
        """Returns the size of the base SentencePiece vocabulary (excludes added tokens)."""
        return self.sp_model.get_piece_size()

    @property
    def bos_token_id(self) -> Optional[int]:
        # Delegates to the SentencePiece model rather than the configured token
        # string; returns -1 (not None) if the model defines no BOS piece.
        return self.sp_model.bos_id()

    @property
    def eos_token_id(self) -> Optional[int]:
        # Same delegation caveat as bos_token_id.
        return self.sp_model.eos_id()

    def get_vocab(self) -> Dict[str, int]:
        """Returns the full vocabulary (base pieces plus added tokens) as a token->id dict."""
        vocab = {self.convert_ids_to_tokens(i): i for i in range(self.vocab_size)}
        vocab.update(self.added_tokens_encoder)
        return vocab

    def _tokenize(self, text: str) -> List[str]:
        """Splits *text* into SentencePiece string pieces."""
        return self.sp_model.encode(text, out_type=str)

    def _convert_token_to_id(self, token: str) -> int:
        """Converts a token (str) into an id using the SentencePiece vocab."""
        return self.sp_model.piece_to_id(token)

    def _convert_id_to_token(self, index: int) -> str:
        """Converts an index (integer) into a token (str) using the SentencePiece vocab."""
        token = self.sp_model.IdToPiece(index)
        return token

    def _maybe_add_prefix_space(self, tokens, decoded):
        # Prepend a space unless the first token is a known "no prefix space"
        # entry. NOTE(review): see no_prefix_space_tokens — the set holds ids
        # while `tokens[0]` is a string, so in practice the space is always added
        # (then stripped again by the caller).
        if tokens and tokens[0] not in self.no_prefix_space_tokens:
            return " " + decoded
        else:
            return decoded

    def convert_tokens_to_string(self, tokens: List[str]) -> str:
        """Converts a sequence of tokens (strings) into a single decoded string."""
        current_sub_tokens = []
        out_string = ""
        prev_is_special = False
        for token in tokens:
            # make sure that special tokens are not decoded using sentencepiece model
            if token in self.all_special_tokens:
                # Flush the accumulated ordinary pieces through SentencePiece,
                # then append the special token verbatim. A single space is
                # inserted before the first special token of a run only.
                if not prev_is_special:
                    out_string += " "
                out_string += self.sp_model.decode(current_sub_tokens) + token
                prev_is_special = True
                current_sub_tokens = []
            else:
                current_sub_tokens.append(token)
                prev_is_special = False
        # Flush any trailing ordinary pieces.
        out_string += self.sp_model.decode(current_sub_tokens)
        out_string = self.clean_up_tokenization(out_string)
        out_string = self._maybe_add_prefix_space(tokens=tokens, decoded=out_string)
        # Drop the leading character added by _maybe_add_prefix_space (or the
        # first character otherwise) — intentional pairing with that helper.
        return out_string[1:]

    def save_vocabulary(self, save_directory, filename_prefix: Optional[str] = None) -> Tuple[str]:
        """
        Save the vocabulary and special tokens file to a directory.

        Args:
            save_directory (`str`):
                The directory in which to save the vocabulary.
            filename_prefix (`str`, *optional*):
                Prefix prepended (with a "-") to the saved filename.

        Returns:
            `Tuple(str)`: Paths to the files saved.
        """
        if not os.path.isdir(save_directory):
            logger.error(f"Vocabulary path ({save_directory}) should be a directory")
            # NOTE(review): bare `return` yields None, contradicting the
            # Tuple[str] annotation — callers unpacking the result will fail.
            return
        out_vocab_file = os.path.join(
            save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"]
        )

        # Copy the original model file when it exists elsewhere; otherwise
        # re-serialize the in-memory SentencePiece model.
        if os.path.abspath(self.vocab_file) != os.path.abspath(out_vocab_file) and os.path.isfile(self.vocab_file):
            copyfile(self.vocab_file, out_vocab_file)
        elif not os.path.isfile(self.vocab_file):
            with open(out_vocab_file, "wb") as fi:
                content_spiece_model = self.sp_model.serialized_model_proto()
                fi.write(content_spiece_model)

        return (out_vocab_file,)

    def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None) -> List[int]:
        """
        Build model inputs by optionally prepending BOS and appending EOS.

        Layout: `[bos?] token_ids_0 [token_ids_1?] [eos?]` — note that no
        separator is inserted between the two sequences.
        """
        if self.add_bos_token:
            bos_token_ids = [self.bos_token_id]
        else:
            bos_token_ids = []

        output = bos_token_ids + token_ids_0

        if token_ids_1 is not None:
            output = output + token_ids_1

        if self.add_eos_token:
            output = output + [self.eos_token_id]

        return output

    def get_special_tokens_mask(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False
    ) -> List[int]:
        """
        Retrieve sequence ids from a token list that has no special tokens added. This method is called when adding
        special tokens using the tokenizer `prepare_for_model` method.

        Args:
            token_ids_0 (`List[int]`):
                List of IDs.
            token_ids_1 (`List[int]`, *optional*):
                Optional second list of IDs for sequence pairs.
            already_has_special_tokens (`bool`, *optional*, defaults to `False`):
                Whether or not the token list is already formatted with special tokens for the model.

        Returns:
            `List[int]`: A list of integers in the range [0, 1]: 1 for a special token, 0 for a sequence token.
        """
        if already_has_special_tokens:
            return super().get_special_tokens_mask(
                token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True
            )

        # NOTE(review): this mask assumes a BOS+...+EOS layout regardless of
        # add_bos_token/add_eos_token, and does not match the layout produced by
        # build_inputs_with_special_tokens above (which adds no separator between
        # pairs) — confirm which convention callers expect.
        if token_ids_1 is None:
            return [1] + ([0] * len(token_ids_0)) + [1]
        return [1] + ([0] * len(token_ids_0)) + [1, 1] + ([0] * len(token_ids_1)) + [1]

    def create_token_type_ids_from_sequences(
        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
    ) -> List[int]:
        """
        Create a mask from the two sequences passed to be used in a sequence-pair classification task. T5 does not make
        use of token type ids, therefore a list of zeros is returned.

        Args:
            token_ids_0 (`List[int]`):
                List of IDs.
            token_ids_1 (`List[int]`, *optional*):
                Optional second list of IDs for sequence pairs.

        Returns:
            `List[int]`: List of zeros.
        """
        eos = [self.eos_token_id]

        if token_ids_1 is None:
            return len(token_ids_0 + eos) * [0]
        return len(token_ids_0 + eos + token_ids_1 + eos) * [0]
"unk_token": "" +}