MHGanainy committed on
Commit 67f97d6
1 Parent(s): 20815e6

Training in progress, step 229

adapter_config.json CHANGED
@@ -20,8 +20,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "c_proj",
-    "c_attn"
+    "c_attn",
+    "c_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f8acf7cc4839ac4a3197fe098b15e17453427fe9a358d4222d21259c9696ce0d
+oid sha256:7f7b74524ed31039939c72f7e433acb3266a021b690a04daae1e0a43c1087e34
 size 13553648
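
The adapter weights are tracked as a Git LFS pointer, so only the SHA-256 hash changes at this checkpoint while the file size stays at 13553648 bytes. A hedged sketch for loading such an adapter on top of its base model with PEFT; the base model name and the local checkpoint directory are assumptions, since the diff names neither.

```python
# Sketch: load this adapter checkpoint on top of its base model with PEFT.
# "gpt2" and "./checkpoint-229" are assumptions; the diff only shows the
# adapter files, not the base model or the local directory layout.
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("gpt2")          # assumed base model
model = PeftModel.from_pretrained(base, "./checkpoint-229")  # dir holding adapter_config.json and adapter_model.safetensors
```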
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2a562e8804b3461f7f39670dab8fadebcd7b223e8e7b1a9fa291f963bc5229b1
+oid sha256:86e27a35922a74d0ef501b43266b25d931e22c1dc59ddd27ff3e2e74552ba442
 size 5240
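
training_args.bin is the `TrainingArguments` object that the Hugging Face `Trainer` serializes with `torch.save` alongside each checkpoint, also stored as an LFS pointer. A small sketch for inspecting it, assuming the file has been downloaded locally:

```python
# Sketch: inspect the serialized TrainingArguments from this checkpoint.
# Assumes training_args.bin is available locally; weights_only=False is needed
# because the file is a pickled dataclass, not a tensor archive.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)
```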