ChenWu98 committed
Commit: ce61924
Parent: 256a2c2

Training in progress, epoch 0

adapter_config.json CHANGED
@@ -19,13 +19,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
-    "gate_proj",
     "o_proj",
+    "down_proj",
+    "gate_proj",
     "k_proj",
-    "q_proj",
+    "v_proj",
     "up_proj",
-    "down_proj"
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
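
The change to adapter_config.json only reorders target_modules: the same seven projection layers appear on both sides of the diff. PEFT normalizes target_modules to a set internally, so the order written to adapter_config.json can differ between otherwise identical saves. Below is a minimal sketch of how a LoRA config with these modules could be built and saved with PEFT; the rank, alpha, and dropout values are assumptions, not taken from this commit.

# Minimal sketch, assuming a standard PEFT LoRA setup; r, lora_alpha,
# and lora_dropout are placeholder values not visible in the diff above.
from peft import LoraConfig

lora_config = LoraConfig(
    task_type="CAUSAL_LM",
    # Same seven modules as in the diff; PEFT keeps them as a set,
    # so their order in the saved JSON is not stable across saves.
    target_modules=["q_proj", "k_proj", "v_proj", "o_proj",
                    "gate_proj", "up_proj", "down_proj"],
    r=16,               # assumed LoRA rank
    lora_alpha=32,      # assumed scaling factor
    lora_dropout=0.05,  # assumed dropout
)

# Writes adapter_config.json into the given directory.
lora_config.save_pretrained("adapter_checkpoint")
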
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a5ef8afaffc00a0a1ed24e6bf2eb5020016451e603284826da6f8d2a45dc00a7
+oid sha256:054c3b9872e6d3d7e0a3b9501878db8122b3951168b24c630f2fa6ebddec64d9
 size 83946192
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c507c7139f6ecd836dacd1b5db7cfff68767efc1e8ff6754fd31a5df7f0db554
+oid sha256:f1a78a2edc255c2d3a023bd58368ec3c37848d7ee2a8e9aa629447c9d6aa343c
 size 6008
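
Both adapter_model.safetensors and training_args.bin are stored as Git LFS pointers; only the oid (the SHA-256 of the file contents) changed in this commit, while the recorded sizes stayed the same. A small sketch for checking a downloaded copy against the new pointer; the local path is a placeholder.

# Sketch: compare a local file's SHA-256 with the oid recorded in its
# Git LFS pointer. The path is a placeholder for wherever the file
# was downloaded to.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

expected = "054c3b9872e6d3d7e0a3b9501878db8122b3951168b24c630f2fa6ebddec64d9"
actual = sha256_of("adapter_model.safetensors")
print("match" if actual == expected else "mismatch")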