LeroyDyer committed on
Commit
1511f8c
1 Parent(s): 3b1d368

(Trained with Unsloth)

Browse files
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "LeroyDyer/_Spydaz_Web_AI_ChatML_B",
3
  "architectures": [
4
  "MistralForCausalLM"
5
  ],
@@ -22,8 +22,8 @@
22
  "rope_theta": 10000.0,
23
  "sliding_window": 8192,
24
  "tie_word_embeddings": false,
25
- "torch_dtype": "bfloat16",
26
- "transformers_version": "4.43.3",
27
  "unsloth_version": "2024.8",
28
  "use_cache": false,
29
  "vocab_size": 32000
 
1
  {
2
+ "_name_or_path": "LeroyDyer/_Spydaz_Web_AI_ChatQA",
3
  "architectures": [
4
  "MistralForCausalLM"
5
  ],
 
22
  "rope_theta": 10000.0,
23
  "sliding_window": 8192,
24
  "tie_word_embeddings": false,
25
+ "torch_dtype": "float16",
26
+ "transformers_version": "4.44.0",
27
  "unsloth_version": "2024.8",
28
  "use_cache": false,
29
  "vocab_size": 32000
generation_config.json CHANGED
@@ -2,7 +2,8 @@
2
  "_from_model_config": true,
3
  "bos_token_id": 1,
4
  "eos_token_id": 2,
 
5
  "pad_token_id": 0,
6
- "transformers_version": "4.43.3",
7
  "use_cache": false
8
  }
 
2
  "_from_model_config": true,
3
  "bos_token_id": 1,
4
  "eos_token_id": 2,
5
+ "max_length": 32768,
6
  "pad_token_id": 0,
7
+ "transformers_version": "4.44.0",
8
  "use_cache": false
9
  }
model-00001-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:8e66e5b2dfe9ee84a4b0e2ae638f8b11868a49993bcd7c43bb3a48c98f7f458a
3
- size 4943162336
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e08ef01aa966447849777c4be2b4fab7a18b967a5b1aa448014ce20b001fd6c5
3
+ size 4943162240
model-00002-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:8c3feb4e547e006948115dfa40bd00c47f2428a02bdc5fba9a2bad6d402a12a5
3
- size 4999819336
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bd1fcdcb131e8e8886c400a2b1ec1bc7bb81fac3db3bc0eaa82cec61ca45cf0f
3
+ size 4999819232
model-00003-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:6610d4373cdd1c6e26c28a1cc822897a5febd6e1f25c573ba030586a0e7cdb7f
3
- size 4540516344
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:15fa8be774ac17361831d27f69ad27807d033eb09ba6ee7e46f99b897be4eafe
3
+ size 4540516256
special_tokens_map.json CHANGED
@@ -1,6 +1,30 @@
1
  {
2
- "bos_token": "<s>",
3
- "eos_token": "<|im_end|>",
4
- "pad_token": "<unk>",
5
- "unk_token": "<unk>"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
  }
 
1
  {
2
+ "bos_token": {
3
+ "content": "<s>",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "eos_token": {
10
+ "content": "<|im_end|>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": {
17
+ "content": "<unk>",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "unk_token": {
24
+ "content": "<unk>",
25
+ "lstrip": false,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ }
30
  }
tokenizer.json CHANGED
@@ -1,11 +1,6 @@
1
  {
2
  "version": "1.0",
3
- "truncation": {
4
- "direction": "Right",
5
- "max_length": 2048,
6
- "strategy": "LongestFirst",
7
- "stride": 0
8
- },
9
  "padding": null,
10
  "added_tokens": [
11
  {
 
1
  {
2
  "version": "1.0",
3
+ "truncation": null,
 
 
 
 
 
4
  "padding": null,
5
  "added_tokens": [
6
  {
tokenizer_config.json CHANGED
@@ -29,13 +29,13 @@
29
  }
30
  },
31
  "bos_token": "<s>",
32
- "chat_template": "{% for message in messages %}{% if message['from'] == 'human' %}{{'<|im_start|>user\n' + message['value'] + '<|im_end|>\n'}}{% elif message['from'] == 'gpt' %}{{'<|im_start|>assistant\n' + message['value'] + '<|im_end|>\n' }}{% else %}{{ '<|im_start|>system\n' + message['value'] + '<|im_end|>\n' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
33
  "clean_up_tokenization_spaces": false,
34
  "eos_token": "<|im_end|>",
35
  "legacy": true,
36
  "max_length": 2048,
37
- "model_max_length": 1000000000000000019884624838656,
38
  "pad_token": "<unk>",
 
39
  "stride": 0,
40
  "tokenizer_class": "LlamaTokenizer",
41
  "truncation_side": "right",
 
29
  }
30
  },
31
  "bos_token": "<s>",
 
32
  "clean_up_tokenization_spaces": false,
33
  "eos_token": "<|im_end|>",
34
  "legacy": true,
35
  "max_length": 2048,
36
+ "model_max_length": 32768,
37
  "pad_token": "<unk>",
38
+ "padding_side": "left",
39
  "stride": 0,
40
  "tokenizer_class": "LlamaTokenizer",
41
  "truncation_side": "right",