whynlp committed
Commit 0c149e9 · verified · 1 Parent(s): 5c68c3f

Upload LCKVLlamaForCausalLM

Files changed (2):
  1. config.json +2 -2
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "outputs/tinyllama-lckv-w2-2.5T-ft-100b",
+  "_name_or_path": "outputs/tinyllama-lckv-w2-ft-100b",
   "architectures": [
     "LCKVLlamaForCausalLM"
   ],
@@ -32,7 +32,7 @@
   "rope_theta": 10000.0,
   "sliding_window": 4096,
   "tie_word_embeddings": false,
-  "torch_dtype": "float32",
+  "torch_dtype": "bfloat16",
   "transformers_version": "4.45.2",
   "use_cache": true,
   "use_sequential": false,
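The substantive change here is the cast from float32 to bfloat16; the "_name_or_path" edit is cosmetic. A minimal loading sketch under stated assumptions: the repo id below is a guess derived from "_name_or_path" (hypothetical), and trust_remote_code=True is presumably required because LCKVLlamaForCausalLM is a custom architecture rather than a core transformers class:

```python
import torch
from transformers import AutoModelForCausalLM

# Load in the dtype the updated config declares. Repo id is an assumption,
# not confirmed by this commit.
model = AutoModelForCausalLM.from_pretrained(
    "whynlp/tinyllama-lckv-w2-ft-100b",  # hypothetical repo id
    torch_dtype=torch.bfloat16,          # matches the new "torch_dtype"
    trust_remote_code=True,              # custom LCKV architecture
)
```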
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1b32eb4a91de591111f3945f04e59539e9b34f74803a1e4d5b4d910101140ee3
-size 4320520376
+oid sha256:5e48e3bb3f7c92cfde0951fbb44687802c1cdbce548d94b84f7b38e6f218ead4
+size 2160269576
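The LFS pointer swap is consistent with the config change above: the checkpoint size roughly halves, as a float32 (4 bytes/param) to bfloat16 (2 bytes/param) cast predicts. A quick back-of-the-envelope check; the small residual difference is presumably safetensors header metadata, which is an assumption:

```python
old_size = 4_320_520_376  # bytes, float32 checkpoint (4 bytes per parameter)
new_size = 2_160_269_576  # bytes, bfloat16 checkpoint (2 bytes per parameter)

print(old_size / new_size)  # ~2.00, as expected for a 4-byte -> 2-byte cast
print(new_size // 2)        # ~1.08e9 parameters, TinyLlama-class model size
```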