timo13113 committed · verified
Commit 4b3f9be · 1 Parent(s): 5a668e0

Training in progress, step 100
config.json CHANGED
@@ -35,5 +35,5 @@
   "torch_dtype": "float32",
   "transformers_version": "4.35.2",
   "use_cache": true,
-  "vocab_size": 14
+  "vocab_size": 15
 }
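The vocab_size bump from 14 to 15 has to stay in sync with the tokenizer files changed below. A minimal sketch of that check, assuming the files are loaded from this repository (the repo id below is a placeholder, not taken from the diff):

```python
# Sketch only: verify that config.vocab_size matches the updated tokenizer.
# "timo13113/model" is a placeholder; substitute the actual repo id.
from transformers import AutoConfig, AutoTokenizer

repo_id = "timo13113/model"
config = AutoConfig.from_pretrained(repo_id)
tokenizer = AutoTokenizer.from_pretrained(repo_id)

# 14 original symbols plus the new <|endoftext|> token at id 0.
assert config.vocab_size == len(tokenizer) == 15
```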
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a818da5cf7e3de3a5219bd7b687df2d14d9812acbb5dd7fed90e32d8161a4417
-size 343427712
+oid sha256:a5a87b2332ca1f1c38164b3159a8d5d647668b93477343331df48e072b62f681
+size 343430784
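The checkpoint grows by 343430784 - 343427712 = 3072 bytes, i.e. 768 float32 values, which would correspond to one extra 768-dimensional embedding row for the newly added token, assuming a hidden size of 768 and tied input/output embeddings (neither is stated in this diff).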
runs/Mar13_00-10-14_DESKTOP-C4VDTPF/events.out.tfevents.1710277816.DESKTOP-C4VDTPF.32644.24 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f723967df55d603723d869d6d3d9860b65a0ccce22cf3097a4cca4a73b56d973
-size 27677
+oid sha256:5b364c078bcd71c93eda73d7e3aa14a6112f5685b775ef19cda8192ca3fc2e47
+size 28677
runs/Mar13_01-10-31_DESKTOP-C4VDTPF/events.out.tfevents.1710281484.DESKTOP-C4VDTPF.32644.25 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9075678903690a02bc3bf8bae3a1181d7df6af5de1f7f2d37835bf534df82a1f
+size 5685
special_tokens_map.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "bos_token": "$",
-  "eos_token": "$",
-  "pad_token": "$"
+  "bos_token": "<|endoftext|>",
+  "eos_token": "<|endoftext|>",
+  "pad_token": "<|endoftext|>"
 }
tokenizer.json CHANGED
@@ -5,7 +5,7 @@
   "added_tokens": [
     {
       "id": 0,
-      "content": "$",
+      "content": "<|endoftext|>",
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
@@ -39,20 +39,21 @@
   "fuse_unk": false,
   "byte_fallback": false,
   "vocab": {
-    "$": 0,
-    "+": 1,
-    "0": 2,
-    "1": 3,
-    "2": 4,
-    "3": 5,
-    "4": 6,
-    "5": 7,
-    "6": 8,
-    "7": 9,
-    "8": 10,
-    "9": 11,
-    ";": 12,
-    "=": 13
+    "<|endoftext|>": 0,
+    "$": 1,
+    "+": 2,
+    "0": 3,
+    "1": 4,
+    "2": 5,
+    "3": 6,
+    "4": 7,
+    "5": 8,
+    "6": 9,
+    "7": 10,
+    "8": 11,
+    "9": 12,
+    ";": 13,
+    "=": 14
   },
   "merges": []
 }
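Inserting <|endoftext|> at id 0 shifts every existing symbol up by one, so token ids produced or cached with the previous tokenizer no longer line up with this checkpoint. A quick sketch of the new mapping, again with a placeholder repo id:

```python
# Sketch only: inspect the re-indexed vocabulary after this commit.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("timo13113/model")  # placeholder repo id

print(tokenizer.eos_token_id)                # <|endoftext|> now sits at id 0
print(tokenizer.convert_tokens_to_ids("$"))  # "$" moved from id 0 to id 1
print(tokenizer.encode("12+34=46;"))         # digits map to 3..12, "+" to 2, "=" to 14, ";" to 13
```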
tokenizer_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "added_tokens_decoder": {
     "0": {
-      "content": "$",
+      "content": "<|endoftext|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -9,10 +9,10 @@
       "special": true
     }
   },
-  "bos_token": "$",
+  "bos_token": "<|endoftext|>",
   "clean_up_tokenization_spaces": true,
-  "eos_token": "$",
+  "eos_token": "<|endoftext|>",
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "$",
+  "pad_token": "<|endoftext|>",
   "tokenizer_class": "PreTrainedTokenizerFast"
 }
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8d058bcea66c54be85dcff7582cc887bc93359d0c34d6890884fd1fcd31aa1f5
+oid sha256:56e79e5677689c1f2c45ffd6361758dfd33b94f16c4f2d5f9f9ccb4979b9b9f5
 size 4536