tomer-shimshi committed on
Commit
c4203ca
·
verified ·
1 Parent(s): 0bf8fac

Upload 7 files

Browse files
adapter_config.json CHANGED
@@ -11,7 +11,7 @@
11
  "layers_to_transform": null,
12
  "loftq_config": {},
13
  "lora_alpha": 32,
14
- "lora_dropout": 0,
15
  "megatron_config": null,
16
  "megatron_core": "megatron.core",
17
  "modules_to_save": null,
 
11
  "layers_to_transform": null,
12
  "loftq_config": {},
13
  "lora_alpha": 32,
14
+ "lora_dropout": 0.1,
15
  "megatron_config": null,
16
  "megatron_core": "megatron.core",
17
  "modules_to_save": null,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:b9340fb0b847cee5d51118c3d24e69a81b4fc4c3fe5bb1bb526998283fdae904
3
  size 16794200
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fe5d93dc95130e7752c4bc0c54217333c807bacdc7bb400459a0070b39db86da
3
  size 16794200
tokenizer_config.json CHANGED
@@ -38,10 +38,14 @@
38
  "bos_token": "<s>",
39
  "clean_up_tokenization_spaces": false,
40
  "eos_token": "</s>",
 
41
  "model_max_length": 1000000000000000019884624838656,
42
  "pad_token": "</s>",
43
  "sp_model_kwargs": {},
 
44
  "tokenizer_class": "LlamaTokenizer",
 
 
45
  "unk_token": "<unk>",
46
  "use_default_system_prompt": false
47
  }
 
38
  "bos_token": "<s>",
39
  "clean_up_tokenization_spaces": false,
40
  "eos_token": "</s>",
41
+ "max_length": 1024,
42
  "model_max_length": 1000000000000000019884624838656,
43
  "pad_token": "</s>",
44
  "sp_model_kwargs": {},
45
+ "stride": 0,
46
  "tokenizer_class": "LlamaTokenizer",
47
+ "truncation_side": "right",
48
+ "truncation_strategy": "longest_first",
49
  "unk_token": "<unk>",
50
  "use_default_system_prompt": false
51
  }