davidshtian committed
Commit c5a031e
Parent(s): 27015bc
Upload folder using huggingface_hub

This view is limited to 50 files because it contains too many changes. See raw diff
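The commit message matches the default produced by the `upload_folder` helper in `huggingface_hub`. A minimal sketch of how a commit like this is typically created; the local path and repo ID below are placeholders, not values taken from this commit:

```python
from huggingface_hub import HfApi

api = HfApi()

# Placeholder folder and repo ID; substitute the real ones.
api.upload_folder(
    folder_path="./mistral-neuron-artifacts",  # local folder containing checkpoint/ and compiled/
    repo_id="davidshtian/<this-repo>",
    repo_type="model",
    # commit_message defaults to "Upload folder using huggingface_hub"
)
```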
- .gitattributes +10 -0
- checkpoint/config.json +26 -0
- checkpoint/generation_config.json +6 -0
- checkpoint/pytorch_model.bin/key_to_filename.json +3 -0
- checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight +3 -0
- checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight +3 -0
- checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p138.model.layers.15.self_attn.v_proj.weight +3 -0
- checkpoint/pytorch_model.bin/p139.model.layers.15.self_attn.o_proj.weight +3 -0
.gitattributes
CHANGED
@@ -33,3 +33,13 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+compiled/21d88bdc0c070aa9ee8a.neff filter=lfs diff=lfs merge=lfs -text
+compiled/3e177a37534e1c3acbfc.neff filter=lfs diff=lfs merge=lfs -text
+compiled/461c30cc205476b74779.neff filter=lfs diff=lfs merge=lfs -text
+compiled/4780b67d630153e50b6b.neff filter=lfs diff=lfs merge=lfs -text
+compiled/530064e106010e9b765e.neff filter=lfs diff=lfs merge=lfs -text
+compiled/9c62d748f7c8575d09e8.neff filter=lfs diff=lfs merge=lfs -text
+compiled/a0114d0e2b58a441a4c9.neff filter=lfs diff=lfs merge=lfs -text
+compiled/a9ff50609bfe030b2b47.neff filter=lfs diff=lfs merge=lfs -text
+compiled/d268415c65f8285e7c6e.neff filter=lfs diff=lfs merge=lfs -text
+compiled/f98e7218439b36b6c070.neff filter=lfs diff=lfs merge=lfs -text
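Each added line tells Git LFS to store the matching `compiled/*.neff` file (a precompiled AWS Neuron executable) as an LFS object, leaving only a small pointer in Git itself. The upload tooling appears to have written one rule per file; the same effect could be achieved by hand with a single wildcard rule, sketched below (an alternative workflow, not what this commit did):

```python
from pathlib import Path

# Hypothetical: one wildcard LFS rule covering every Neuron executable under compiled/.
attributes = Path(".gitattributes")
rule = "compiled/*.neff filter=lfs diff=lfs merge=lfs -text\n"
existing = attributes.read_text() if attributes.exists() else ""
if rule not in existing:
    attributes.write_text(existing + rule)
```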
checkpoint/config.json
ADDED
@@ -0,0 +1,26 @@
+{
+  "_name_or_path": "mistralai/Mistral-7B-Instruct-v0.2",
+  "architectures": [
+    "MistralForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 14336,
+  "max_position_embeddings": 32768,
+  "model_type": "mistral",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 8,
+  "rms_norm_eps": 1e-05,
+  "rope_theta": 1000000.0,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.36.2",
+  "use_cache": true,
+  "vocab_size": 32000
+}
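This is the stock Mistral-7B-Instruct-v0.2 configuration: 32 hidden layers, 32 attention heads with 8 key/value heads (grouped-query attention), hidden size 4096, bfloat16 weights. As a quick sanity check it can be loaded with `transformers` (a sketch assuming the file sits under a local `checkpoint/` directory):

```python
from transformers import AutoConfig

# Reads checkpoint/config.json added in this commit.
config = AutoConfig.from_pretrained("checkpoint")
print(config.model_type)           # "mistral"
print(config.num_hidden_layers)    # 32
print(config.num_key_value_heads)  # 8
```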
checkpoint/generation_config.json
ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "transformers_version": "4.36.2"
+}
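The generation config only records the BOS/EOS token IDs inherited from the model config. It can be read the same way (sketch):

```python
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("checkpoint")
print(gen_config.bos_token_id, gen_config.eos_token_id)  # 1 2
```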
checkpoint/pytorch_model.bin/key_to_filename.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:825d20f4a18183eff3963e805edd13ef7eb35b0aff7a850e8153ca1eeeb37970
+size 26397
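Note that `pytorch_model.bin` is a directory here, not a single file: every parameter tensor is stored in its own `p<N>.<parameter name>` file, and `key_to_filename.json` maps parameter names to those files, matching the split-checkpoint layout produced by transformers-neuronx's `save_pretrained_split`. Assuming each per-tensor file is an individually `torch.save`d tensor, a full state dict could be rebuilt roughly as follows (a sketch, not code shipped in this repo):

```python
import json
import os

import torch

ckpt_dir = "checkpoint/pytorch_model.bin"

# Assumed mapping, e.g. "model.embed_tokens.weight" -> "p0.model.embed_tokens.weight".
with open(os.path.join(ckpt_dir, "key_to_filename.json")) as f:
    key_to_filename = json.load(f)

# Each file is assumed to hold a single tensor serialized with torch.save.
state_dict = {
    key: torch.load(os.path.join(ckpt_dir, filename), map_location="cpu")
    for key, filename in key_to_filename.items()
}
print(f"Reassembled {len(state_dict)} tensors")
```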
checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:34769e631e679aeadf54413c6c2da66c6d330a6cf766eddfdfb9bc12ce0e6fee
+size 262144789
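The per-tensor file sizes follow directly from the config above: bfloat16 weights take 2 bytes per element, plus roughly a kilobyte of `torch.save` serialization overhead per file. A quick check against the sizes listed in this commit:

```python
# Expected raw tensor sizes in bytes (bfloat16 = 2 bytes/element),
# using the dimensions from checkpoint/config.json.
hidden, intermediate, vocab = 4096, 14336, 32000
kv_dim = (hidden // 32) * 8  # 8 KV heads x head_dim 128

print(vocab * hidden * 2)         # 262_144_000 -> embed_tokens file: 262_144_789
print(hidden * hidden * 2)        # 33_554_432  -> q_proj/o_proj files: ~33_555_400
print(kv_dim * hidden * 2)        # 8_388_608   -> k_proj/v_proj files: ~8_389_600
print(intermediate * hidden * 2)  # 117_440_512 -> gate/up/down_proj files: ~117_441_400
print(hidden * 2)                 # 8_192       -> layernorm files: ~9_100
```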
checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:93c0be773793187488d1f220260b09a939c18a59df70bcfa141eb38a51bbf668
+size 33555324
checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:40d029c49bf764ddbd294b1a6df28eabdcc3f7b9ad8b1af63b52b0ee8a495d22
+size 33555327
checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2911cb42dfee9837653073d17f87e2b44d2ba55d923ba96945c251186939fe6e
+size 33555397
checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:69aa66df87d22bd39a56457b8129bf54898f9b0d44f6e97f1c8d079711c3790a
+size 8389573
checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e78ec4e12f40c3ded44bbfb8b90a288a839b4d2b55322ea931ead1eed886ad06
+size 8389573
checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6af2773747960981e6339b5d274dd74cbbb2a91f5ecebec69e837e9547fa56ea
+size 33555397
checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5533d25fa3e6aee261d883217afdeac77620d3e608deccadf8a27c87bdee847a
+size 117441404
checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f37a9ccb2266856c63cb5ea9e85342b9bd52433c757d2b7dd0ef7474ed64d9c3
+size 117441398
checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1efd21b6d4eac97992e2831f9d0db0b63287255a8f45bfef040a1f7d19539740
+size 117441404
checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:882269e0fa57b8ea8856f128004debf36672cb8a31e6c78b6286897a61eb2ea1
+size 9090
checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:81c1724525bfd33aa5991693554c374e3506c94adba22839b1146c677e660221
+size 9181
checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a936cd8159e2499531d0b4428fd4fa66d48d6663d29f7a87ccb9089df273a032
+size 33555397
checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6ee16015f7ad545753a38763f7561002270871b137bae7de8d33f8f0b7f7cf8b
+size 8389503
checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4a3698304cee04ce4db8902fc1e2582872a64ba516f9189e8e107d07feb97449
+size 8389573
checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:02e3456e01e2526a510891d17cf44621d2ce03937630f805e8580c6b40c8cb52
+size 8389573
checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d5e93b9978f8bf399a0a9ea4dec35961282ce74a58b5a461bde362757415a029
+size 33555397
checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ebbf05c3c6850cbf475672b616e4cb1acff98ea2d94267e800f1710241429b3d
+size 117441404
checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1cd30e661c7c11b35800a6f4e295d464a26a653c3372909e0d930bee4dcfa957
+size 117441398
checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e738e558e0ce20308a6672cc4624c5cec9ee835740791e7e06966c04a7462784
+size 117441404
checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6c4232366f2170a467f6d418146fc770b7dbfa674a21ea8330a25a96f4acb2ab
+size 9090
checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ddf4920888de2fa1c6a22e68769ad2459de22e72e9583f479db1a950e3eafee6
+size 9181
checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b38a3831fe6176cf8982789deb0f691f0876f81b06c39a0c298cf3c5f45f9fb1
+size 33555397
checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c18bd22fff5e7b4e05a4127802d94e404652c097e59873cdcd7d503e39f490a9
+size 8389573
checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:15e14436c8e8acc0770ee913b0f93931d2fdc726b2f8b8e5ec0ce5380219e0e5
+size 8389503
checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:34f182d5458d93a4496d1f4eaaefeab32ee32cdb9d4db14f4adb3a7d867db51a
+size 8389573
checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:28712782690690b34dc284496dffd39a97d1adeb11dbc21f3ae9dd3b9d41559c
+size 33555397
checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dedddadb3d139d4a24a4a586cc1145f5db00c286a131b4682efb1a958992ded5
+size 117441404
checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:009b284a078933fffd03a21137f6562cacd5460c9a1d7f69744fc9406629d73c
+size 117441398
checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:24685ab4525c51aa4266a67b61dec414837fa9e8cbc1987c54009177d41358f1
+size 117441404
checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9d929fe16814e56f5de9bffb52276d6487838c0938ca0ba7af85f221b432f3dc
+size 9090
checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d0e816b8d2ae916d7ba386dbe6a9d674a1dc88eac3e14f722d6ee366286efaa7
+size 9181
checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0f30ce43cd9aad9c4f1a456f2f3bc6be69e9dde5127a1bdce314f7318aaeeabd
+size 33555397
checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9748d9a54ba258bc2b8ecdde60cb2151eab5a92352c8adea31030555c39ef068
+size 8389573
checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:22f65d49dc4b6977cbe60752ac3db4d882cd207ad988b48c66b6f6cde95d59cf
+size 8389573
checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5f8609781d462b75ab073210aa1e94b20df9354353916c5bfae5a7559fc89a02
+size 33555327
checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fcee44327496ce3b0a31ba584e4744f2868d5fb2dec4a550d8079b6207a4f1f0
+size 33555397
checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:381afcd392d48adc4c8afb8a59b601afd2de43057ea9ec8fcca6e4390891fcc7
+size 117441404
checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:222c00bc3da686b380dd50f5b7cd5a5ac5d4bdb4713783fe2bebbeb4617c81d2
+size 117441398
checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:99a84a288b13b2d90248c98073ed6f9ac6c49dec72203a8c3d5375690c995084
+size 117441404
checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d1bf2e4971e9df43c97b39ee7fdc4021f12f448e1e2df9ff524368790aec9adf
+size 9090
checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:65250f12cc63fa93d26a547169f67d36978ce9716afb76703ea84d7e65bae4db
+size 9181
checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fd1386378b8c361f408fe4027299d56642d53087857b2e88dce4b579ddeabbe7
+size 33555397
checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:438f719aba30ee40253deb71b7d9762a55317ab57cdbe1dcbeb1380eaf98c578
+size 8389573
checkpoint/pytorch_model.bin/p138.model.layers.15.self_attn.v_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d52957ead89c3fb8bdd6fc7f341fe87549366f5958c0f310164ca6a902bc83b1
+size 8389573
checkpoint/pytorch_model.bin/p139.model.layers.15.self_attn.o_proj.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:130d60709c0a61b72c7fda337722a5b66ed81d06741ef8d0390ba50a45856833
+size 33555397
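The remaining files in this commit follow the same pattern: one LFS pointer per tensor shard, alongside the precompiled `compiled/*.neff` Neuron executables tracked in `.gitattributes`. To materialize the actual payloads locally (resolving the LFS pointers shown above into real files), `snapshot_download` from `huggingface_hub` can be used; the repo ID below is a placeholder:

```python
from huggingface_hub import snapshot_download

# Placeholder repo ID; substitute the repository this commit belongs to.
local_dir = snapshot_download(
    repo_id="davidshtian/<this-repo>",
    allow_patterns=["checkpoint/*", "compiled/*"],
)
print(local_dir)
```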