breathemm committed
Commit 91b0f09
Parent: 2794ef0

Training in progress, step 10

adapter_config.json CHANGED
@@ -19,13 +19,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "gate_proj",
-    "q_proj",
-    "up_proj",
-    "v_proj",
     "k_proj",
+    "o_proj",
     "down_proj",
-    "o_proj"
+    "up_proj",
+    "gate_proj",
+    "v_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3681eda3802f45df388f5ea42cc59745a3d704d9be96288ca8145537fd77789b
+oid sha256:0ef8237485b087363254b34f6b0faece78a015b0f1c9b232694baa1e4e1bcc79
 size 125918320
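adapter_model.safetensors is stored via Git LFS, so the diff touches only the pointer file: new weights give a new sha256 oid, while the byte size is unchanged (125918320). A hedged sketch of verifying a downloaded object against this pointer, using only the Python standard library; the local file path is a hypothetical placeholder.

# Minimal sketch: check a downloaded LFS object against the pointer's
# sha256 oid and size. The path below is an assumed local location.
import hashlib
import os

path = "adapter_model.safetensors"
expected_oid = "0ef8237485b087363254b34f6b0faece78a015b0f1c9b232694baa1e4e1bcc79"
expected_size = 125918320

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert h.hexdigest() == expected_oid, "sha256 mismatch"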
tokenizer.json CHANGED
@@ -1,6 +1,11 @@
 {
   "version": "1.0",
-  "truncation": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 2048,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
   "padding": null,
   "added_tokens": [
     {
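The tokenizer now carries an explicit truncation policy (right-truncate to 2048 tokens, LongestFirst strategy, zero stride) where it previously had "truncation": null. A minimal sketch of setting the equivalent policy with the Hugging Face tokenizers library; note the Python API takes lowercase names for the values serialized as "Right" and "LongestFirst" in tokenizer.json, and the local file path is an assumption.

# Minimal sketch, assuming the `tokenizers` library.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")  # assumed local path

# Mirrors the truncation block added in the diff above.
tok.enable_truncation(
    max_length=2048,
    stride=0,
    strategy="longest_first",
    direction="right",
)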
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a2ccbb1107716c7e57ee01869a14c9ac773cc6381ac44da6ce905ec87ca8849a
+oid sha256:d3a1ade0c5551568d322b2e557988b7db6ad5cdad7045ecf104e3a3ba65924de
 size 4856