magniolia committed (verified)
Commit 57c8077 · Parent: 3b75b8f

Training in progress, step 49

adapter_config.json CHANGED
@@ -3,6 +3,7 @@
   "auto_mapping": null,
   "base_model_name_or_path": "mistralai/Mistral-7B-Instruct-v0.2",
   "bias": "none",
+  "exclude_modules": null,
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,
@@ -20,10 +21,10 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "o_proj",
     "q_proj",
-    "v_proj",
-    "k_proj"
+    "k_proj",
+    "o_proj",
+    "v_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c2f48464e7cc4206332afc5946ab8991ff05657ca84eebb3aad6add609dc19ca
+oid sha256:bef45aecc779b57579854b0b0333e2ca13dea792122d2e7fb5c589794b98d19b
 size 218138576
runs/Oct14_19-36-23_ac64fa66bb34/events.out.tfevents.1728934594.ac64fa66bb34.9315.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b02eeb61437faa887f9eaf5bebb9e9470556bdbe114c3f3d60f97c58362ae467
+size 10359
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b915fadb5087938a9e11ab01afa8eb08af0688200400f783baeceb4bf038940a
+oid sha256:8ba3cc8f5e252986f3d21ad8599e86e0f50fc5a58c81a44657c2c0ea12b05fae
 size 5432
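The binary files above are Git LFS pointers: only the sha256 oid of adapter_model.safetensors and training_args.bin changed (sizes are identical), so the step-49 checkpoint simply overwrote the previous adapter weights and trainer state in place. A hedged sketch of loading such an adapter onto the base model with PEFT is below; the adapter path is a placeholder for wherever this repository is actually hosted.

from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

BASE = "mistralai/Mistral-7B-Instruct-v0.2"
ADAPTER = "path/to/this-adapter-repo"  # placeholder, not the actual repo id

tokenizer = AutoTokenizer.from_pretrained(BASE)
base_model = AutoModelForCausalLM.from_pretrained(BASE, device_map="auto")

# Applies the LoRA weights stored in adapter_model.safetensors on top of the base model.
model = PeftModel.from_pretrained(base_model, ADAPTER)
model.eval()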