BlueDice committed
Commit aba2a2e · 1 parent: b3b6f3c

Upload 11 files

.gitattributes CHANGED
@@ -32,3 +32,6 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ decoder_model_merged.onnx_data filter=lfs diff=lfs merge=lfs -text
+ decoder_model.onnx_data filter=lfs diff=lfs merge=lfs -text
+ decoder_with_past_model.onnx_data filter=lfs diff=lfs merge=lfs -text
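
These filter lines are what route the large external-data files through Git LFS instead of regular Git. A minimal sketch of how the new patterns apply, using Python's fnmatch as a stand-in for gitattributes glob matching (the file list here is illustrative):

from fnmatch import fnmatch

# Patterns added by this commit (exact filenames in this case).
lfs_patterns = [
    "decoder_model_merged.onnx_data",
    "decoder_model.onnx_data",
    "decoder_with_past_model.onnx_data",
]

for name in ["config.json", "decoder_model.onnx_data"]:
    via_lfs = any(fnmatch(name, pattern) for pattern in lfs_patterns)
    print(name, "-> LFS" if via_lfs else "-> regular git")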
config.json ADDED
@@ -0,0 +1,95 @@
+ {
+ "_name_or_path": "PygmalionAI/pygmalion-1.3b",
+ "architectures": [
+ "GPTNeoXForCausalLM"
+ ],
+ "bad_words_ids": [
+ [
+ 434,
+ 15694,
+ 66,
+ 27,
+ 209
+ ],
+ [
+ 15362
+ ],
+ [
+ 1713
+ ],
+ [
+ 1713,
+ 64
+ ],
+ [
+ 1713,
+ 876
+ ],
+ [
+ 2016,
+ 251,
+ 857,
+ 75,
+ 9194,
+ 35478
+ ],
+ [
+ 2391
+ ],
+ [
+ 20340
+ ],
+ [
+ 33021
+ ],
+ [
+ 2391,
+ 1051
+ ],
+ [
+ 5638
+ ],
+ [
+ 2391,
+ 20340
+ ],
+ [
+ 5638,
+ 537
+ ],
+ [
+ 1559,
+ 2345
+ ],
+ [
+ 1559,
+ 7849
+ ],
+ [
+ 1559,
+ 17379
+ ],
+ [
+ 25321,
+ 4611
+ ]
+ ],
+ "bos_token_id": 0,
+ "eos_token_id": 0,
+ "hidden_act": "gelu",
+ "hidden_size": 2048,
+ "initializer_range": 0.02,
+ "intermediate_size": 8192,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 2048,
+ "model_type": "gpt_neox",
+ "num_attention_heads": 16,
+ "num_hidden_layers": 24,
+ "rotary_emb_base": 10000,
+ "rotary_pct": 0.25,
+ "tie_word_embeddings": false,
+ "transformers_version": "4.28.1",
+ "use_cache": true,
+ "use_parallel_residual": true,
+ "vocab_size": 50304
+ }
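
This config identifies the model as a 24-layer, 2048-hidden GPT-NeoX causal LM. A minimal sketch of loading the ONNX decoders added below, assuming the optimum.onnxruntime package and a local checkout of this repo (the directory path is hypothetical):

from optimum.onnxruntime import ORTModelForCausalLM
from transformers import AutoTokenizer

model_dir = "./pygmalion-1.3b-onnx"  # hypothetical local clone of this repo

tokenizer = AutoTokenizer.from_pretrained(model_dir)
# decoder_model_merged.onnx bundles the no-cache and with-cache decoding
# paths in one graph, so it serves both the first step and later steps.
model = ORTModelForCausalLM.from_pretrained(
    model_dir, file_name="decoder_model_merged.onnx"
)

inputs = tokenizer("Hello,", return_tensors="pt")
output = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(output[0]))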
decoder_model.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7b6b54e5aad59218fb1b4f0f709c2ef1f7818ff5f035d4407dd0afebd2fe197f
+ size 2042499
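
Each of the ONNX entries in this commit is stored as a three-line Git LFS pointer rather than the file itself. A small sketch of parsing that pointer format (plain Python, no LFS tooling assumed):

def parse_lfs_pointer(text: str) -> dict:
    # Each pointer line is "key value"; collect them into a dict.
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:7b6b54e5aad59218fb1b4f0f709c2ef1f7818ff5f035d4407dd0afebd2fe197f
size 2042499"""

info = parse_lfs_pointer(pointer)
print(info["oid"], int(info["size"]))  # digest and byte size of the real blob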
decoder_model.onnx_data ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:68951757486419a275ef05c4e509b3d9d1bb5a961c51afcfdee01f452b7a2636
+ size 5662785536
decoder_model_merged.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8b1575794a619a3ca0eb657b46ee5454920b98a2e31aff35c1afe11efae5ab22
+ size 4095593
decoder_model_merged.onnx_data ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:68951757486419a275ef05c4e509b3d9d1bb5a961c51afcfdee01f452b7a2636
+ size 5662785536
decoder_with_past_model.onnx ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8eca63728855c876f9fed324e98b0a21a66671a22d00a023dcf4964caa215928
+ size 2076877
decoder_with_past_model.onnx_data ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:68951757486419a275ef05c4e509b3d9d1bb5a961c51afcfdee01f452b7a2636
+ size 5662785536
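
Note that all three .onnx_data pointers carry the same oid and size: the decoder variants share a single 5,662,785,536-byte external-data blob, so LFS stores the weights once. A hedged sketch for verifying a downloaded blob against its pointer:

import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file so the 5.6 GB blob is never held in memory at once.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk_size):
            digest.update(block)
    return digest.hexdigest()

expected = "68951757486419a275ef05c4e509b3d9d1bb5a961c51afcfdee01f452b7a2636"
# After replacing the pointer with the real download:
# assert sha256_of("decoder_model.onnx_data") == expected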
generation_config.json ADDED
@@ -0,0 +1,77 @@
+ {
+ "_from_model_config": true,
+ "bad_words_ids": [
+ [
+ 434,
+ 15694,
+ 66,
+ 27,
+ 209
+ ],
+ [
+ 15362
+ ],
+ [
+ 1713
+ ],
+ [
+ 1713,
+ 64
+ ],
+ [
+ 1713,
+ 876
+ ],
+ [
+ 2016,
+ 251,
+ 857,
+ 75,
+ 9194,
+ 35478
+ ],
+ [
+ 2391
+ ],
+ [
+ 20340
+ ],
+ [
+ 33021
+ ],
+ [
+ 2391,
+ 1051
+ ],
+ [
+ 5638
+ ],
+ [
+ 2391,
+ 20340
+ ],
+ [
+ 5638,
+ 537
+ ],
+ [
+ 1559,
+ 2345
+ ],
+ [
+ 1559,
+ 7849
+ ],
+ [
+ 1559,
+ 17379
+ ],
+ [
+ 25321,
+ 4611
+ ]
+ ],
+ "bos_token_id": 0,
+ "eos_token_id": 0,
+ "transformers_version": "4.28.1"
+ }
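
generation_config.json repeats the bad_words_ids from config.json, so generate() bans those token-id sequences by default (transformers enforces them via NoBadWordsLogitsProcessor). A sketch of inspecting what a few of the banned ids decode to, assuming the base model's tokenizer:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("PygmalionAI/pygmalion-1.3b")

# A few of the banned sequences copied from the config above.
banned = [[434, 15694, 66, 27, 209], [15362], [25321, 4611]]
for ids in banned:
    print(ids, "->", repr(tokenizer.decode(ids)))

# These are applied automatically by model.generate(); an explicit override
# would look like: model.generate(**inputs, bad_words_ids=banned)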
special_tokens_map.json ADDED
@@ -0,0 +1,5 @@
+ {
+ "bos_token": "<|endoftext|>",
+ "eos_token": "<|endoftext|>",
+ "unk_token": "<|endoftext|>"
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "add_prefix_space": false,
+ "bos_token": "<|endoftext|>",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|endoftext|>",
+ "model_max_length": 1000000000000000019884624838656,
+ "tokenizer_class": "GPTNeoXTokenizer",
+ "unk_token": "<|endoftext|>"
+ }
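
The tokenizer files round out the export: all three special tokens map to <|endoftext|>, as is usual for GPT-NeoX, and model_max_length is the transformers "very large integer" sentinel, meaning the tokenizer itself imposes no length cap (the effective 2048-token limit comes from max_position_embeddings in config.json). A quick check, reusing the hypothetical local path from the earlier sketch:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./pygmalion-1.3b-onnx")  # hypothetical path
print(tok.bos_token, tok.eos_token, tok.unk_token)  # expect <|endoftext|> for all three
print(tok.model_max_length)  # 1000000000000000019884624838656, i.e. no real limit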