xzyao committed (verified)
Commit fe0481f · Parent: 6e2e5bc

Upload folder using huggingface_hub

compress_config.json CHANGED
@@ -4,7 +4,7 @@
   "sparsity": 0.5,
   "damp_percent": 0.01,
   "desc_act": false,
-  "sym": false,
+  "sym": true,
   "true_sequential": true,
   "lossless": "none",
   "prunen": 2,
delta_config.json CHANGED
@@ -1 +1 @@
- {"base_model": "meta-llama/Llama-3.2-1B", "compress_config": {"bits": 4, "group_size": -1, "sparsity": 0.5, "damp_percent": 0.01, "desc_act": false, "sym": false, "true_sequential": true, "lossless": "none", "prunen": 2, "prunem": 4, "block_size": 128}, "target_modules": ["self_attn.k_proj", "self_attn.v_proj", "self_attn.q_proj", "self_attn.o_proj", "mlp.up_proj", "mlp.gate_proj", "mlp.down_proj"]}
+ {"base_model": "meta-llama/Llama-3.2-1B", "compress_config": {"bits": 4, "group_size": -1, "sparsity": 0.5, "damp_percent": 0.01, "desc_act": false, "sym": true, "true_sequential": true, "lossless": "none", "prunen": 2, "prunem": 4, "block_size": 128}, "target_modules": ["self_attn.k_proj", "self_attn.v_proj", "self_attn.q_proj", "self_attn.o_proj", "mlp.up_proj", "mlp.gate_proj", "mlp.down_proj"]}
deltazip-compressed.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:9bbcbf73561f6bc5d0a17ea6a2081feed2d1304e87602d8c502d9a5c4bd85576
- size 16
+ oid sha256:3ba6b500732536710e23cc2b3c1d943c87c33a7beb55ac3c077ef27a78af1a72
+ size 1416353584
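
The .safetensors file is stored through Git LFS, so the diff shows the three-line pointer file rather than the weights themselves. The old pointer referenced a 16-byte object (effectively a placeholder), while the new one points at roughly 1.4 GB of compressed tensors. A hypothetical parser for this pointer format, using the new pointer as input:

def parse_lfs_pointer(text):
    # Each pointer line is "key value"; size is in bytes.
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    fields["size"] = int(fields["size"])
    return fields

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:3ba6b500732536710e23cc2b3c1d943c87c33a7beb55ac3c077ef27a78af1a72\n"
    "size 1416353584\n"
)
info = parse_lfs_pointer(pointer)
print(f'{info["size"] / 1e9:.2f} GB')  # 1.42 GB
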