{"metadata": {"mergekit_version": "0.0.4.4", "total_size": 2122383360}, "weight_map": {"lm_head.weight": "model-00001-of-00003.safetensors", "transformer.h.0.input_layernorm.bias": "model-00001-of-00003.safetensors", "transformer.h.0.input_layernorm.weight": "model-00001-of-00003.safetensors", "transformer.h.0.mlp.dense_4h_to_h.weight": "model-00001-of-00003.safetensors", "transformer.h.0.mlp.dense_h_to_4h.weight": "model-00001-of-00003.safetensors", "transformer.h.0.self_attention.dense.weight": "model-00001-of-00003.safetensors", "transformer.h.0.self_attention.query_key_value.weight": "model-00001-of-00003.safetensors", "transformer.h.1.input_layernorm.bias": "model-00001-of-00003.safetensors", "transformer.h.1.input_layernorm.weight": "model-00001-of-00003.safetensors", "transformer.h.1.mlp.dense_4h_to_h.weight": "model-00002-of-00003.safetensors", "transformer.h.1.mlp.dense_h_to_4h.weight": "model-00002-of-00003.safetensors", "transformer.h.1.self_attention.dense.weight": "model-00002-of-00003.safetensors", "transformer.h.1.self_attention.query_key_value.weight": "model-00002-of-00003.safetensors", "transformer.h.2.input_layernorm.bias": "model-00002-of-00003.safetensors", "transformer.h.2.input_layernorm.weight": "model-00002-of-00003.safetensors", "transformer.h.2.mlp.dense_4h_to_h.weight": "model-00002-of-00003.safetensors", "transformer.h.2.mlp.dense_h_to_4h.weight": "model-00002-of-00003.safetensors", "transformer.h.2.self_attention.dense.weight": "model-00002-of-00003.safetensors", "transformer.h.2.self_attention.query_key_value.weight": "model-00002-of-00003.safetensors", "transformer.ln_f.bias": "model-00002-of-00003.safetensors", "transformer.ln_f.weight": "model-00002-of-00003.safetensors", "transformer.word_embeddings.weight": "model-00003-of-00003.safetensors"}}