Toflamus/GPT-2_para3M_256
- README.md +228 -0
- config.json +39 -0
- generation_config.json +6 -0
- merges.txt +0 -0
- pytorch_model.bin +3 -0
- special_tokens_map.json +6 -0
- tokenizer.json +0 -0
- tokenizer_config.json +9 -0
- training_args.bin +3 -0
- vocab.json +0 -0
README.md
ADDED
@@ -0,0 +1,228 @@
---
license: mit
base_model: gpt2
tags:
- generated_from_trainer
model-index:
- name: GPT-2_para3M_512
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# GPT-2_para3M_512

This model is a fine-tuned version of [gpt2](https://huggingface.co/gpt2) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 2.1100

## Model description

More information needed

## Intended uses & limitations

More information needed
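Since the intended-use section is still a stub, here is a minimal usage sketch. It assumes only the standard `transformers` text-generation pipeline and this repository's id (`Toflamus/GPT-2_para3M_256`); the sampling settings mirror `task_specific_params` in config.json below.

```python
# Minimal usage sketch, not part of the original card. Assumes the standard
# transformers text-generation pipeline; the repo id comes from this repository.
from transformers import pipeline

generator = pipeline("text-generation", model="Toflamus/GPT-2_para3M_256")

# do_sample=True / max_length=50 mirror task_specific_params in config.json below.
out = generator("Once upon a time", do_sample=True, max_length=50)
print(out[0]["generated_text"])
```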
## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training (a hedged `TrainingArguments` reconstruction follows the list):
- learning_rate: 0.0005
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- gradient_accumulation_steps: 8
- total_train_batch_size: 64
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: cosine
- lr_scheduler_warmup_steps: 100
- num_epochs: 2
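As a rough reconstruction, the list maps onto `transformers.TrainingArguments` as sketched below. `output_dir` and the eval/logging cadence are assumptions (the results table reports evaluation every 500 steps); the Adam betas and epsilon above are the library defaults, so nothing needs to be set for them. Everything else is copied from the list.

```python
# Hedged reconstruction of the hyperparameters above; output_dir and the
# eval/logging cadence are assumptions, the rest mirrors the list verbatim.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="GPT-2_para3M_512",      # assumed from the model-index name
    learning_rate=5e-4,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    seed=42,
    gradient_accumulation_steps=8,      # 8 * 8 = total train batch size of 64
    lr_scheduler_type="cosine",
    warmup_steps=100,
    num_train_epochs=2,
    # Adam betas=(0.9, 0.999) and epsilon=1e-08 are the defaults, so not set here.
    evaluation_strategy="steps",        # assumed; the table logs eval every 500 steps
    eval_steps=500,
    logging_steps=500,
)
```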
### Training results

| Training Loss | Epoch | Step | Validation Loss |
|:-------------:|:-----:|:-----:|:---------------:|
| 4.1873 | 0.01 | 500 | 4.0187 |
| 3.5461 | 0.02 | 1000 | 3.4287 |
| 3.2706 | 0.04 | 1500 | 3.1495 |
| 3.105 | 0.05 | 2000 | 2.9773 |
| 2.9885 | 0.06 | 2500 | 2.8566 |
| 2.8931 | 0.07 | 3000 | 2.7720 |
| 2.8307 | 0.08 | 3500 | 2.7016 |
| 2.7912 | 0.09 | 4000 | 2.6474 |
| 2.7295 | 0.11 | 4500 | 2.5972 |
| 2.6927 | 0.12 | 5000 | 2.5641 |
| 2.6756 | 0.13 | 5500 | 2.5248 |
| 2.6536 | 0.14 | 6000 | 2.4972 |
| 2.6186 | 0.15 | 6500 | 2.4730 |
| 2.5947 | 0.17 | 7000 | 2.4492 |
| 2.591 | 0.18 | 7500 | 2.4313 |
| 2.5706 | 0.19 | 8000 | 2.4172 |
| 2.5441 | 0.2 | 8500 | 2.3991 |
| 2.5266 | 0.21 | 9000 | 2.3838 |
| 2.5259 | 0.22 | 9500 | 2.3740 |
| 2.5173 | 0.24 | 10000 | 2.3629 |
| 2.5122 | 0.25 | 10500 | 2.3549 |
| 2.5004 | 0.26 | 11000 | 2.3409 |
| 2.4902 | 0.27 | 11500 | 2.3364 |
| 2.4735 | 0.28 | 12000 | 2.3242 |
| 2.4784 | 0.29 | 12500 | 2.3193 |
| 2.4754 | 0.31 | 13000 | 2.3126 |
| 2.4587 | 0.32 | 13500 | 2.3077 |
| 2.4613 | 0.33 | 14000 | 2.3050 |
| 2.4562 | 0.34 | 14500 | 2.2968 |
| 2.4422 | 0.35 | 15000 | 2.2913 |
| 2.4307 | 0.37 | 15500 | 2.2870 |
| 2.4339 | 0.38 | 16000 | 2.2814 |
| 2.445 | 0.39 | 16500 | 2.2801 |
| 2.4257 | 0.4 | 17000 | 2.2747 |
| 2.425 | 0.41 | 17500 | 2.2709 |
| 2.4095 | 0.42 | 18000 | 2.2672 |
| 2.4137 | 0.44 | 18500 | 2.2632 |
| 2.4284 | 0.45 | 19000 | 2.2601 |
| 2.419 | 0.46 | 19500 | 2.2569 |
| 2.4221 | 0.47 | 20000 | 2.2504 |
| 2.3951 | 0.48 | 20500 | 2.2507 |
| 2.4054 | 0.5 | 21000 | 2.2515 |
| 2.3977 | 0.51 | 21500 | 2.2442 |
| 2.4009 | 0.52 | 22000 | 2.2422 |
| 2.3941 | 0.53 | 22500 | 2.2388 |
| 2.3909 | 0.54 | 23000 | 2.2349 |
| 2.4016 | 0.55 | 23500 | 2.2380 |
| 2.389 | 0.57 | 24000 | 2.2326 |
| 2.3864 | 0.58 | 24500 | 2.2287 |
| 2.3795 | 0.59 | 25000 | 2.2285 |
| 2.3817 | 0.6 | 25500 | 2.2266 |
| 2.3789 | 0.61 | 26000 | 2.2256 |
| 2.3801 | 0.62 | 26500 | 2.2210 |
| 2.3687 | 0.64 | 27000 | 2.2189 |
| 2.378 | 0.65 | 27500 | 2.2194 |
| 2.3735 | 0.66 | 28000 | 2.2157 |
| 2.3758 | 0.67 | 28500 | 2.2142 |
| 2.3616 | 0.68 | 29000 | 2.2133 |
| 2.3731 | 0.7 | 29500 | 2.2085 |
| 2.3606 | 0.71 | 30000 | 2.2115 |
| 2.3516 | 0.72 | 30500 | 2.2072 |
| 2.3551 | 0.73 | 31000 | 2.2067 |
| 2.3626 | 0.74 | 31500 | 2.2033 |
| 2.3516 | 0.75 | 32000 | 2.2031 |
| 2.3658 | 0.77 | 32500 | 2.2008 |
| 2.3554 | 0.78 | 33000 | 2.1992 |
| 2.3524 | 0.79 | 33500 | 2.1988 |
| 2.3509 | 0.8 | 34000 | 2.1996 |
| 2.3474 | 0.81 | 34500 | 2.1949 |
| 2.3431 | 0.83 | 35000 | 2.1943 |
| 2.3413 | 0.84 | 35500 | 2.1907 |
| 2.3592 | 0.85 | 36000 | 2.1917 |
| 2.3636 | 0.86 | 36500 | 2.1919 |
| 2.3529 | 0.87 | 37000 | 2.1881 |
| 2.3371 | 0.88 | 37500 | 2.1875 |
| 2.3413 | 0.9 | 38000 | 2.1856 |
| 2.3463 | 0.91 | 38500 | 2.1839 |
| 2.3303 | 0.92 | 39000 | 2.1859 |
| 2.3432 | 0.93 | 39500 | 2.1790 |
| 2.3455 | 0.94 | 40000 | 2.1801 |
| 2.344 | 0.95 | 40500 | 2.1761 |
| 2.3442 | 0.97 | 41000 | 2.1759 |
| 2.3331 | 0.98 | 41500 | 2.1760 |
| 2.3391 | 0.99 | 42000 | 2.1748 |
| 2.3275 | 1.0 | 42500 | 2.1760 |
| 2.3308 | 1.01 | 43000 | 2.1712 |
| 2.3191 | 1.03 | 43500 | 2.1727 |
| 2.3182 | 1.04 | 44000 | 2.1682 |
| 2.3184 | 1.05 | 44500 | 2.1683 |
| 2.3177 | 1.06 | 45000 | 2.1668 |
| 2.3163 | 1.07 | 45500 | 2.1643 |
| 2.321 | 1.08 | 46000 | 2.1631 |
| 2.3164 | 1.1 | 46500 | 2.1655 |
| 2.3231 | 1.11 | 47000 | 2.1631 |
| 2.3139 | 1.12 | 47500 | 2.1591 |
| 2.3223 | 1.13 | 48000 | 2.1588 |
| 2.3133 | 1.14 | 48500 | 2.1588 |
| 2.2995 | 1.16 | 49000 | 2.1569 |
| 2.308 | 1.17 | 49500 | 2.1578 |
| 2.3062 | 1.18 | 50000 | 2.1539 |
| 2.3203 | 1.19 | 50500 | 2.1538 |
| 2.3116 | 1.2 | 51000 | 2.1526 |
| 2.294 | 1.21 | 51500 | 2.1520 |
| 2.2941 | 1.23 | 52000 | 2.1499 |
| 2.3053 | 1.24 | 52500 | 2.1502 |
| 2.3154 | 1.25 | 53000 | 2.1507 |
| 2.3057 | 1.26 | 53500 | 2.1485 |
| 2.3106 | 1.27 | 54000 | 2.1464 |
| 2.3035 | 1.28 | 54500 | 2.1457 |
| 2.304 | 1.3 | 55000 | 2.1445 |
| 2.2985 | 1.31 | 55500 | 2.1439 |
| 2.296 | 1.32 | 56000 | 2.1421 |
| 2.2917 | 1.33 | 56500 | 2.1411 |
| 2.2936 | 1.34 | 57000 | 2.1406 |
| 2.2866 | 1.36 | 57500 | 2.1383 |
| 2.2973 | 1.37 | 58000 | 2.1396 |
| 2.2865 | 1.38 | 58500 | 2.1378 |
| 2.2929 | 1.39 | 59000 | 2.1370 |
| 2.2858 | 1.4 | 59500 | 2.1351 |
| 2.2857 | 1.41 | 60000 | 2.1350 |
| 2.3019 | 1.43 | 60500 | 2.1338 |
| 2.289 | 1.44 | 61000 | 2.1330 |
| 2.2874 | 1.45 | 61500 | 2.1318 |
| 2.2858 | 1.46 | 62000 | 2.1305 |
| 2.2875 | 1.47 | 62500 | 2.1298 |
| 2.2859 | 1.49 | 63000 | 2.1294 |
| 2.28 | 1.5 | 63500 | 2.1275 |
| 2.2866 | 1.51 | 64000 | 2.1277 |
| 2.2851 | 1.52 | 64500 | 2.1281 |
| 2.2806 | 1.53 | 65000 | 2.1258 |
| 2.2889 | 1.54 | 65500 | 2.1245 |
| 2.2745 | 1.56 | 66000 | 2.1249 |
| 2.2739 | 1.57 | 66500 | 2.1230 |
| 2.2853 | 1.58 | 67000 | 2.1226 |
| 2.2773 | 1.59 | 67500 | 2.1228 |
| 2.2742 | 1.6 | 68000 | 2.1214 |
| 2.2656 | 1.61 | 68500 | 2.1200 |
| 2.2756 | 1.63 | 69000 | 2.1194 |
| 2.2806 | 1.64 | 69500 | 2.1193 |
| 2.271 | 1.65 | 70000 | 2.1186 |
| 2.2671 | 1.66 | 70500 | 2.1185 |
| 2.2718 | 1.67 | 71000 | 2.1168 |
| 2.2781 | 1.69 | 71500 | 2.1172 |
| 2.2744 | 1.7 | 72000 | 2.1164 |
| 2.2744 | 1.71 | 72500 | 2.1156 |
| 2.2603 | 1.72 | 73000 | 2.1154 |
| 2.2703 | 1.73 | 73500 | 2.1141 |
| 2.267 | 1.74 | 74000 | 2.1141 |
| 2.2614 | 1.76 | 74500 | 2.1141 |
| 2.263 | 1.77 | 75000 | 2.1133 |
| 2.2668 | 1.78 | 75500 | 2.1128 |
| 2.2642 | 1.79 | 76000 | 2.1128 |
| 2.2637 | 1.8 | 76500 | 2.1128 |
| 2.2692 | 1.82 | 77000 | 2.1118 |
| 2.2631 | 1.83 | 77500 | 2.1117 |
| 2.2567 | 1.84 | 78000 | 2.1116 |
| 2.2707 | 1.85 | 78500 | 2.1112 |
| 2.2707 | 1.86 | 79000 | 2.1109 |
| 2.2664 | 1.87 | 79500 | 2.1114 |
| 2.266 | 1.89 | 80000 | 2.1113 |
| 2.2645 | 1.9 | 80500 | 2.1108 |
| 2.2767 | 1.91 | 81000 | 2.1106 |
| 2.274 | 1.92 | 81500 | 2.1102 |
| 2.2587 | 1.93 | 82000 | 2.1102 |
| 2.2736 | 1.94 | 82500 | 2.1100 |
| 2.2633 | 1.96 | 83000 | 2.1102 |
| 2.2652 | 1.97 | 83500 | 2.1100 |
| 2.2655 | 1.98 | 84000 | 2.1101 |
| 2.2683 | 1.99 | 84500 | 2.1100 |
### Framework versions

- Transformers 4.32.0
- Pytorch 2.0.1+cu117
- Datasets 2.14.4
- Tokenizers 0.13.2
config.json
ADDED
@@ -0,0 +1,39 @@
{
  "_name_or_path": "gpt2",
  "activation_function": "gelu_new",
  "architectures": [
    "GPT2LMHeadModel"
  ],
  "attn_pdrop": 0.1,
  "bos_token_id": 50256,
  "embd_pdrop": 0.1,
  "eos_token_id": 50256,
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
  "model_type": "gpt2",
  "n_ctx": 256,
  "n_embd": 64,
  "n_head": 8,
  "n_inner": null,
  "n_layer": 6,
  "n_positions": 1024,
  "reorder_and_upcast_attn": false,
  "resid_pdrop": 0.1,
  "scale_attn_by_inverse_layer_idx": false,
  "scale_attn_weights": true,
  "summary_activation": null,
  "summary_first_dropout": 0.1,
  "summary_proj_to_labels": true,
  "summary_type": "cls_index",
  "summary_use_proj": true,
  "task_specific_params": {
    "text-generation": {
      "do_sample": true,
      "max_length": 50
    }
  },
  "torch_dtype": "float32",
  "transformers_version": "4.32.0",
  "use_cache": true,
  "vocab_size": 50257
}
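One sanity check worth recording: the tiny dimensions above (n_embd 64, 6 layers, 8 heads) imply roughly 3.6M parameters, consistent with the "para3M" in the repo name and the ~14.35 MB float32 checkpoint below. A back-of-the-envelope sketch (my arithmetic, not shipped with the repo):

```python
# Rough parameter count implied by config.json above (not part of the repo;
# GPT2LMHeadModel ties lm_head to the token embedding, so it adds no weights).
n_embd, n_layer, n_positions, vocab = 64, 6, 1024, 50257
n_inner = 4 * n_embd  # n_inner is null, which GPT-2 treats as 4 * n_embd

embed = vocab * n_embd + n_positions * n_embd                            # wte + wpe
attn = (n_embd * 3 * n_embd + 3 * n_embd) + (n_embd * n_embd + n_embd)   # c_attn + c_proj
mlp = (n_embd * n_inner + n_inner) + (n_inner * n_embd + n_embd)         # c_fc + c_proj
norms = 2 * (2 * n_embd)                                                 # ln_1 + ln_2

total = embed + n_layer * (attn + mlp + norms) + 2 * n_embd              # + final ln_f
print(total)      # 3582016 -> ~3.58M parameters
print(total * 4)  # ~14.3 MB of float32 weights, matching pytorch_model.bin's size
```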
generation_config.json
ADDED
@@ -0,0 +1,6 @@
{
  "_from_model_config": true,
  "bos_token_id": 50256,
  "eos_token_id": 50256,
  "transformers_version": "4.32.0"
}
merges.txt
ADDED
The diff for this file is too large to render.
pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7eb5e077ed85648365991e05f202e7b200cee30d76fe05a25b438ef9d38da578
size 14352341
special_tokens_map.json
ADDED
@@ -0,0 +1,6 @@
{
  "bos_token": "<|endoftext|>",
  "eos_token": "<|endoftext|>",
  "pad_token": "<|endoftext|>",
  "unk_token": "<|endoftext|>"
}
tokenizer.json
ADDED
The diff for this file is too large to render.
tokenizer_config.json
ADDED
@@ -0,0 +1,9 @@
{
  "add_prefix_space": false,
  "bos_token": "<|endoftext|>",
  "clean_up_tokenization_spaces": true,
  "eos_token": "<|endoftext|>",
  "model_max_length": 1024,
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": "<|endoftext|>"
}
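The tokenizer files appear to be the stock GPT-2 BPE artifacts. As a small sanity-check sketch (assuming only `AutoTokenizer` and this repository's id), all four special tokens should resolve to `<|endoftext|>`, id 50256, as special_tokens_map.json above declares:

```python
# Sanity-check sketch for the tokenizer files above (assumes AutoTokenizer;
# not part of the repo). special_tokens_map.json maps bos/eos/pad/unk to the
# same <|endoftext|> token, which is id 50256 in the GPT-2 vocabulary.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Toflamus/GPT-2_para3M_256")
print(tok.bos_token, tok.eos_token, tok.pad_token, tok.unk_token)  # all <|endoftext|>
print(tok.convert_tokens_to_ids("<|endoftext|>"))                  # 50256
```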
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:178283ad030dc217fe4ba290350184c1c391a77c96388bdee74eed15231ae63c
size 4027
vocab.json
ADDED
The diff for this file is too large to render.