MostafaDorrah committed on
Commit f60a2cf · verified · 1 Parent(s): 80137cd

Training in progress, step 50

adapter_config.json CHANGED
@@ -19,8 +19,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
-    "v_proj"
+    "v_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
adapter_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d0d020cb058b8d3a82a1b020cfd060917438f439deb6264b5b143d2c8defaf9a
+ size 134264202
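This is a Git LFS pointer file: the ~134 MB of adapter weights live in LFS storage, and only the pointer (version, object hash, size) is versioned in git. A hypothetical loading sketch, with the base model and repo id as assumptions (the fill-in-the-middle special tokens below suggest a CodeLlama base, but the commit does not say):

```python
# Assumed base model and placeholder repo id, for illustration only.
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("codellama/CodeLlama-7b-hf")  # assumption
model = PeftModel.from_pretrained(base, "MostafaDorrah/<adapter-repo>")   # placeholder id
```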
special_tokens_map.json CHANGED
@@ -7,10 +7,6 @@
   "▁<PRE>",
   "▁<MID>",
   "▁<SUF>",
-  "▁<EOT>",
-  "▁<PRE>",
-  "▁<MID>",
-  "▁<SUF>",
   "▁<EOT>"
   ],
   "bos_token": {
tokenizer_config.json CHANGED
@@ -1,6 +1,4 @@
   {
-  "add_bos_token": true,
-  "add_eos_token": false,
   "added_tokens_decoder": {
   "0": {
   "content": "<unk>",
@@ -81,7 +79,7 @@
   "legacy": null,
   "middle_token": "▁<MID>",
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "</s>",
+  "pad_token": null,
   "prefix_token": "▁<PRE>",
   "sp_model_kwargs": {},
   "suffix_token": "▁<SUF>",
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:842833d2f929d1c6adda83e82a0a3ccd2009db0a45977214c767ab76438c00f1
- size 4984
+ oid sha256:94e0e645e1fa625481de400144e7c1c024c497f32977461ccb6c11987d9e9364
+ size 4536
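training_args.bin is the torch-pickled TrainingArguments object that Trainer saves alongside checkpoints; only its LFS pointer changed here. A sketch for inspecting the run's hyperparameters after downloading the file (weights_only=False is needed on recent PyTorch because this is a pickle, not a tensor file):

```python
import torch

# Inspect the serialized TrainingArguments; attribute names follow
# transformers' TrainingArguments, the actual values are not in this diff.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size)
```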