ljcnju committed d26fd75 (1 parent: 343ba62)

Training in progress, epoch 0

adapter_config.json CHANGED
@@ -19,8 +19,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
-    "q_proj"
+    "q_proj",
+    "v_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f90d6441f906e968e24e2c5eafe0924885a190dfeedada8d65b667e41c122965
+oid sha256:b1cc634919867a1adaf83e12c7977b12ca9f40e34b9c06e3a0b8f4d3162dcb7a
 size 16794200
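
The adapter_model.safetensors entry is a Git LFS pointer: the oid is the SHA-256 of the actual weight file and size is its byte count. A small sketch of verifying a downloaded copy against the updated pointer (the local path is hypothetical; the oid and size are taken from the diff above):

# Sketch: check that a downloaded file matches the LFS pointer's oid and size.
import hashlib
import os

path = "adapter_model.safetensors"  # hypothetical local path
expected_oid = "b1cc634919867a1adaf83e12c7977b12ca9f40e34b9c06e3a0b8f4d3162dcb7a"
expected_size = 16794200

digest = hashlib.sha256(open(path, "rb").read()).hexdigest()
assert digest == expected_oid, "checksum mismatch"
assert os.path.getsize(path) == expected_size, "size mismatch"
print("file matches the LFS pointer")
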
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:000f9e1abb31dbbe1028a90de5727cce3c3fc08165a092df1078608a63d4581e
+oid sha256:f8d6aa000d367a51c4d067e31543d71875ef43bb401f5b40cfb03a14fcb1afa9
 size 4728
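
training_args.bin is the serialized transformers TrainingArguments object that the Trainer saves alongside each checkpoint. A minimal sketch of inspecting it locally, assuming the file has been downloaded and you trust its source (it is deserialized with pickle):

# Sketch: inspect the training arguments stored in training_args.bin.
# torch.load uses pickle here, so only do this for files you trust.
import torch

training_args = torch.load("training_args.bin", weights_only=False)
print(training_args)  # prints the TrainingArguments fields (learning rate, epochs, ...)
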