apepkuss79 committed
Commit
f0e6265
1 Parent(s): 342c654

Upload config.json with huggingface_hub
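For reference, an upload like this is typically made with huggingface_hub's HfApi.upload_file. A minimal sketch follows; the repo id shown is assumed from context, not confirmed by this commit:

from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` or the HF_TOKEN env var
api.upload_file(
    path_or_fileobj="config.json",                    # local file to push
    path_in_repo="config.json",                       # destination path inside the repo
    repo_id="apepkuss79/Phi-3-medium-128k-instruct",  # assumed repo id for illustration
    commit_message="Upload config.json with huggingface_hub",
)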

Files changed (1)
  1. config.json +169 -0
config.json ADDED
@@ -0,0 +1,169 @@
+ {
+   "_name_or_path": "Phi-3-medium-128k-instruct",
+   "architectures": [
+     "Phi3ForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "auto_map": {
+     "AutoConfig": "configuration_phi3.Phi3Config",
+     "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
+   },
+   "bos_token_id": 1,
+   "embd_pdrop": 0.0,
+   "eos_token_id": 32000,
+   "hidden_act": "silu",
+   "hidden_size": 5120,
+   "initializer_range": 0.02,
+   "intermediate_size": 17920,
+   "max_position_embeddings": 131072,
+   "model_type": "phi3",
+   "num_attention_heads": 40,
+   "num_hidden_layers": 40,
+   "num_key_value_heads": 10,
+   "original_max_position_embeddings": 4096,
+   "pad_token_id": null,
+   "resid_pdrop": 0.0,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "long_factor": [
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.25,
+       1.25,
+       1.5,
+       2.0,
+       2.75,
+       5.75,
+       5.75,
+       6.5,
+       9.25,
+       11.0,
+       13.25,
+       19.25,
+       19.75,
+       19.75,
+       21.25,
+       21.5,
+       26.5,
+       30.0,
+       33.75,
+       35.25,
+       38.5,
+       42.0,
+       42.25,
+       46.0,
+       47.0,
+       50.0,
+       50.5,
+       51.0,
+       52.0,
+       52.75,
+       53.75,
+       54.75,
+       57.0,
+       57.25,
+       58.5,
+       59.25,
+       59.5,
+       62.0,
+       62.5,
+       62.75,
+       63.25,
+       63.25,
+       63.25,
+       63.75,
+       64.0,
+       64.0,
+       64.25,
+       64.5,
+       64.5,
+       65.0,
+       65.0
+     ],
+     "short_factor": [
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.0,
+       1.01,
+       1.02,
+       1.02,
+       1.04,
+       1.04,
+       1.07,
+       1.07,
+       1.1,
+       1.3000000000000003,
+       1.3000000000000003,
+       1.5000000000000004,
+       1.5700000000000005,
+       1.9000000000000008,
+       2.3100000000000014,
+       2.759999999999992,
+       3.3899999999999784,
+       3.9399999999999666,
+       4.009999999999965,
+       4.289999999999959,
+       4.349999999999958,
+       5.349999999999937,
+       6.659999999999909,
+       7.029999999999901,
+       7.51999999999989,
+       8.00999999999988,
+       8.249999999999876,
+       8.279999999999875,
+       9.629999999999846,
+       9.89999999999984,
+       10.589999999999826,
+       11.049999999999816,
+       11.7899999999998,
+       12.189999999999792,
+       12.889999999999777,
+       13.129999999999772,
+       13.16999999999977,
+       13.20999999999977,
+       13.479999999999764,
+       13.539999999999763,
+       13.779999999999758,
+       13.929999999999755,
+       14.429999999999744,
+       14.759999999999737,
+       15.149999999999729,
+       15.419999999999723,
+       15.53999999999972,
+       15.659999999999718,
+       15.749999999999716,
+       15.759999999999716,
+       15.799999999999715,
+       16.05999999999971,
+       16.079999999999714,
+       16.11999999999972,
+       16.11999999999972,
+       16.18999999999973,
+       16.31999999999975,
+       16.539999999999786,
+       16.799999999999827
+     ],
+     "type": "su"
+   },
+   "rope_theta": 10000.0,
+   "sliding_window": 131072,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.39.3",
+   "use_cache": true,
+   "vocab_size": 32064
+ }
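Because auto_map points at the custom configuration_phi3/modeling_phi3 modules, transformers will only resolve these classes when loading with trust_remote_code=True. As a quick sanity check on the values above, here is a minimal sketch that parses this config.json directly and verifies that the rope_scaling factor lists match the per-head rotary dimension (5120 / 40 = 128, so 64 factors each):

import json

with open("config.json") as f:
    cfg = json.load(f)

head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]     # 5120 // 40 = 128
half_rotary = head_dim // 2                                     # one scale factor per rotary dimension pair
assert len(cfg["rope_scaling"]["long_factor"]) == half_rotary   # 64 long-context factors
assert len(cfg["rope_scaling"]["short_factor"]) == half_rotary  # 64 short-context factors

# Context window extended from 4096 to 131072 positions via the "su" rope scaling
print(cfg["original_max_position_embeddings"], "->", cfg["max_position_embeddings"])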