TinyPixel committed
Commit a49cdf3 · 1 Parent(s): 8cd3a26

Upload folder using huggingface_hub

adapter_config.json CHANGED
@@ -19,9 +19,9 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "down_proj",
     "gate_proj",
-    "up_proj",
-    "down_proj"
+    "up_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
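
The only substantive change in adapter_config.json is that the target_modules list is now sorted alphabetically; the same three projection layers are targeted, and since PEFT matches target modules by name rather than by position, the reordering does not change which layers receive LoRA adapters. For context, a minimal sketch of how such a config is consumed; the base model id and rank below are assumptions for illustration, not values taken from this repository.

from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

# Hypothetical base model; the actual base is not named in this diff.
base = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-v0.1")

lora_config = LoraConfig(
    r=16,  # assumed rank; not shown in this hunk
    target_modules=["down_proj", "gate_proj", "up_proj"],  # ordering from this commit
    task_type="CAUSAL_LM",
)

model = get_peft_model(base, lora_config)
model.print_trainable_parameters()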
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7a926ef25b9cdd2b797bf64978a73ca0ae89e2f139931f2548b25cf518023ca2
+oid sha256:a3fbb3def4431ffde81c233629cf753d5463b95a4600fe361b04edc5eb3d84e5
 size 113271504
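
The binary files in this repository are stored as Git LFS pointers, so the diff only shows a new sha256 object id: the adapter weights changed while the file size stayed the same. A small sketch for verifying a downloaded copy against the pointer's oid (the local path is an assumption):

import hashlib

EXPECTED_OID = "a3fbb3def4431ffde81c233629cf753d5463b95a4600fe361b04edc5eb3d84e5"

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    # Hash the file in chunks to avoid loading ~113 MB into memory at once.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

assert sha256_of("adapter_model.safetensors") == EXPECTED_OID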
tokenizer_config.json CHANGED
@@ -29,6 +29,7 @@
   },
   "additional_special_tokens": [],
   "bos_token": "<s>",
+  "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '### User:\n' + message['content'] }}\n{% elif message['role'] == 'system' %}\n{{ '### System:\n' + message['content'] }}\n{% elif message['role'] == 'assistant' %}\n{{ '### Assistant:\n' + message['content'] }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '### Assistant:' }}\n{% endif %}\n{% endfor %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": true,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:25b2e1d814285761e9b408722faa49e891af61f981d068b5d85f17f46a2a68f8
+oid sha256:80edb04acfa4ae493a45a85880a890cec0ff9e08ab6aed9f6e3c619d7c2cd139
 size 4728
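
training_args.bin is the pickled transformers TrainingArguments object that the Trainer writes alongside checkpoints, so only its LFS object id changes here. Assuming the file was produced that way, it can be inspected roughly like this (recent torch versions require weights_only=False to unpickle non-tensor objects, and the transformers version should match the one used for training):

import torch

# Load the serialized TrainingArguments and print a couple of fields.
training_args = torch.load("training_args.bin", weights_only=False)
print(training_args.learning_rate, training_args.num_train_epochs)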