haryoaw committed
Commit 41526e0
1 parent: 67b3c3b

Initial Commit

Files changed (4)
  1. README.md +41 -91
  2. eval_result_ner.json +1 -1
  3. model.safetensors +1 -1
  4. training_args.bin +1 -1
README.md CHANGED
@@ -1,14 +1,14 @@
  ---
- base_model: microsoft/mdeberta-v3-base
  library_name: transformers
  license: mit
+ base_model: microsoft/mdeberta-v3-base
+ tags:
+ - generated_from_trainer
  metrics:
  - precision
  - recall
  - f1
  - accuracy
- tags:
- - generated_from_trainer
  model-index:
  - name: scenario-non-kd-scr-ner-full-mdeberta_data-univner_full55
    results: []
@@ -21,11 +21,11 @@ should probably proofread and complete it, then remove this comment. -->

  This model is a fine-tuned version of [microsoft/mdeberta-v3-base](https://huggingface.co/microsoft/mdeberta-v3-base) on the None dataset.
  It achieves the following results on the evaluation set:
- - Loss: 0.3730
- - Precision: 0.6215
- - Recall: 0.6022
- - F1: 0.6117
- - Accuracy: 0.9630
+ - Loss: 0.2741
+ - Precision: 0.5988
+ - Recall: 0.6128
+ - F1: 0.6057
+ - Accuracy: 0.9621

  ## Model description

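A quick arithmetic check on the evaluation bullets above: F1 is the harmonic mean of precision and recall, so the new-side numbers are internally consistent. A minimal sketch, using the reported values:

```python
# Sanity check: F1 should be the harmonic mean of precision and recall.
precision, recall = 0.5988, 0.6128
f1 = 2 * precision * recall / (precision + recall)
print(round(f1, 4))  # ~0.6057, matching the reported F1
```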
@@ -54,89 +54,39 @@ The following hyperparameters were used during training:

  ### Training results

- | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
- |:-------------:|:-------:|:-----:|:---------------:|:---------:|:------:|:------:|:--------:|
- | 0.3045 | 0.2910 | 500 | 0.2283 | 0.3540 | 0.2161 | 0.2684 | 0.9357 |
- | 0.1895 | 0.5821 | 1000 | 0.1836 | 0.4325 | 0.3881 | 0.4091 | 0.9470 |
- | 0.1453 | 0.8731 | 1500 | 0.1564 | 0.4890 | 0.4855 | 0.4873 | 0.9530 |
- | 0.1066 | 1.1641 | 2000 | 0.1663 | 0.5335 | 0.4569 | 0.4923 | 0.9552 |
- | 0.0852 | 1.4552 | 2500 | 0.1476 | 0.5104 | 0.5826 | 0.5441 | 0.9564 |
- | 0.0793 | 1.7462 | 3000 | 0.1463 | 0.5519 | 0.5879 | 0.5693 | 0.9590 |
- | 0.0707 | 2.0373 | 3500 | 0.1563 | 0.6032 | 0.5569 | 0.5791 | 0.9609 |
- | 0.0466 | 2.3283 | 4000 | 0.1710 | 0.6074 | 0.5667 | 0.5864 | 0.9614 |
- | 0.0458 | 2.6193 | 4500 | 0.1601 | 0.5843 | 0.6106 | 0.5971 | 0.9613 |
- | 0.0456 | 2.9104 | 5000 | 0.1588 | 0.5943 | 0.5926 | 0.5934 | 0.9624 |
- | 0.0308 | 3.2014 | 5500 | 0.1774 | 0.5950 | 0.5965 | 0.5957 | 0.9624 |
- | 0.0257 | 3.4924 | 6000 | 0.1898 | 0.5864 | 0.5852 | 0.5858 | 0.9614 |
- | 0.027 | 3.7835 | 6500 | 0.1869 | 0.5966 | 0.5963 | 0.5964 | 0.9619 |
- | 0.0248 | 4.0745 | 7000 | 0.2036 | 0.6006 | 0.5920 | 0.5962 | 0.9623 |
- | 0.015 | 4.3655 | 7500 | 0.2156 | 0.6220 | 0.5713 | 0.5956 | 0.9624 |
- | 0.0172 | 4.6566 | 8000 | 0.1987 | 0.6221 | 0.5996 | 0.6106 | 0.9632 |
- | 0.0171 | 4.9476 | 8500 | 0.2058 | 0.5867 | 0.6334 | 0.6091 | 0.9622 |
- | 0.0107 | 5.2386 | 9000 | 0.2216 | 0.5917 | 0.6125 | 0.6019 | 0.9621 |
- | 0.0104 | 5.5297 | 9500 | 0.2355 | 0.6218 | 0.5924 | 0.6068 | 0.9631 |
- | 0.0107 | 5.8207 | 10000 | 0.2279 | 0.6144 | 0.6096 | 0.6120 | 0.9625 |
- | 0.0099 | 6.1118 | 10500 | 0.2473 | 0.6093 | 0.6009 | 0.6051 | 0.9626 |
- | 0.0069 | 6.4028 | 11000 | 0.2488 | 0.5906 | 0.6133 | 0.6017 | 0.9623 |
- | 0.0081 | 6.6938 | 11500 | 0.2435 | 0.5928 | 0.6154 | 0.6039 | 0.9624 |
- | 0.0083 | 6.9849 | 12000 | 0.2470 | 0.6183 | 0.5973 | 0.6076 | 0.9627 |
- | 0.0046 | 7.2759 | 12500 | 0.2558 | 0.6143 | 0.5913 | 0.6026 | 0.9627 |
- | 0.0052 | 7.5669 | 13000 | 0.2637 | 0.5962 | 0.6205 | 0.6081 | 0.9617 |
- | 0.0055 | 7.8580 | 13500 | 0.2688 | 0.6019 | 0.5941 | 0.5980 | 0.9621 |
- | 0.0052 | 8.1490 | 14000 | 0.2702 | 0.5943 | 0.6126 | 0.6033 | 0.9618 |
- | 0.0036 | 8.4400 | 14500 | 0.2707 | 0.5897 | 0.6289 | 0.6087 | 0.9618 |
- | 0.0044 | 8.7311 | 15000 | 0.2772 | 0.6089 | 0.6044 | 0.6066 | 0.9627 |
- | 0.0045 | 9.0221 | 15500 | 0.2781 | 0.6104 | 0.5856 | 0.5977 | 0.9621 |
- | 0.0028 | 9.3132 | 16000 | 0.2813 | 0.6061 | 0.6099 | 0.6080 | 0.9627 |
- | 0.0032 | 9.6042 | 16500 | 0.2962 | 0.6134 | 0.5999 | 0.6066 | 0.9627 |
- | 0.0041 | 9.8952 | 17000 | 0.2819 | 0.6097 | 0.5980 | 0.6038 | 0.9625 |
- | 0.0025 | 10.1863 | 17500 | 0.2859 | 0.6138 | 0.6037 | 0.6087 | 0.9627 |
- | 0.0022 | 10.4773 | 18000 | 0.2976 | 0.6018 | 0.6122 | 0.6069 | 0.9624 |
- | 0.0027 | 10.7683 | 18500 | 0.3066 | 0.6387 | 0.5819 | 0.6090 | 0.9626 |
- | 0.003 | 11.0594 | 19000 | 0.2925 | 0.6402 | 0.5921 | 0.6152 | 0.9632 |
- | 0.002 | 11.3504 | 19500 | 0.3069 | 0.5776 | 0.6094 | 0.5931 | 0.9613 |
- | 0.0023 | 11.6414 | 20000 | 0.2979 | 0.6201 | 0.6063 | 0.6131 | 0.9628 |
- | 0.0023 | 11.9325 | 20500 | 0.3015 | 0.5935 | 0.6181 | 0.6056 | 0.9621 |
- | 0.0015 | 12.2235 | 21000 | 0.3179 | 0.6137 | 0.6070 | 0.6103 | 0.9629 |
- | 0.0016 | 12.5146 | 21500 | 0.3073 | 0.6145 | 0.6211 | 0.6178 | 0.9631 |
- | 0.0017 | 12.8056 | 22000 | 0.3159 | 0.6267 | 0.5914 | 0.6085 | 0.9628 |
- | 0.0016 | 13.0966 | 22500 | 0.3224 | 0.6003 | 0.6154 | 0.6077 | 0.9623 |
- | 0.0015 | 13.3877 | 23000 | 0.3160 | 0.6111 | 0.5884 | 0.5995 | 0.9624 |
- | 0.0016 | 13.6787 | 23500 | 0.3201 | 0.6208 | 0.6057 | 0.6132 | 0.9630 |
- | 0.0016 | 13.9697 | 24000 | 0.3187 | 0.6251 | 0.5954 | 0.6099 | 0.9626 |
- | 0.0011 | 14.2608 | 24500 | 0.3188 | 0.6253 | 0.6094 | 0.6173 | 0.9630 |
- | 0.0013 | 14.5518 | 25000 | 0.3178 | 0.6170 | 0.6165 | 0.6168 | 0.9629 |
- | 0.0011 | 14.8428 | 25500 | 0.3311 | 0.6304 | 0.5830 | 0.6058 | 0.9626 |
- | 0.0011 | 15.1339 | 26000 | 0.3345 | 0.6200 | 0.6077 | 0.6138 | 0.9631 |
- | 0.0009 | 15.4249 | 26500 | 0.3385 | 0.6107 | 0.5970 | 0.6038 | 0.9623 |
- | 0.0011 | 15.7159 | 27000 | 0.3289 | 0.6219 | 0.6192 | 0.6206 | 0.9632 |
- | 0.001 | 16.0070 | 27500 | 0.3345 | 0.6101 | 0.5986 | 0.6043 | 0.9627 |
- | 0.0005 | 16.2980 | 28000 | 0.3388 | 0.6202 | 0.6053 | 0.6126 | 0.9627 |
- | 0.0007 | 16.5891 | 28500 | 0.3375 | 0.6204 | 0.6152 | 0.6178 | 0.9630 |
- | 0.0009 | 16.8801 | 29000 | 0.3439 | 0.6103 | 0.6175 | 0.6139 | 0.9627 |
- | 0.0008 | 17.1711 | 29500 | 0.3406 | 0.6238 | 0.6149 | 0.6193 | 0.9630 |
- | 0.0006 | 17.4622 | 30000 | 0.3436 | 0.6147 | 0.6086 | 0.6116 | 0.9631 |
- | 0.0007 | 17.7532 | 30500 | 0.3336 | 0.6366 | 0.6080 | 0.6219 | 0.9633 |
- | 0.0005 | 18.0442 | 31000 | 0.3510 | 0.6210 | 0.6038 | 0.6123 | 0.9630 |
- | 0.0005 | 18.3353 | 31500 | 0.3560 | 0.6148 | 0.6038 | 0.6093 | 0.9626 |
- | 0.0008 | 18.6263 | 32000 | 0.3578 | 0.6195 | 0.6097 | 0.6146 | 0.9627 |
- | 0.0004 | 18.9173 | 32500 | 0.3573 | 0.6300 | 0.6035 | 0.6165 | 0.9631 |
- | 0.0005 | 19.2084 | 33000 | 0.3565 | 0.6336 | 0.6041 | 0.6185 | 0.9630 |
- | 0.0004 | 19.4994 | 33500 | 0.3627 | 0.6317 | 0.6047 | 0.6179 | 0.9633 |
- | 0.0005 | 19.7905 | 34000 | 0.3632 | 0.6161 | 0.6216 | 0.6188 | 0.9631 |
- | 0.0004 | 20.0815 | 34500 | 0.3581 | 0.6086 | 0.6115 | 0.6100 | 0.9629 |
- | 0.0004 | 20.3725 | 35000 | 0.3638 | 0.6148 | 0.6029 | 0.6088 | 0.9628 |
- | 0.0005 | 20.6636 | 35500 | 0.3579 | 0.6216 | 0.6022 | 0.6118 | 0.9627 |
- | 0.0003 | 20.9546 | 36000 | 0.3601 | 0.6014 | 0.6214 | 0.6112 | 0.9627 |
- | 0.0002 | 21.2456 | 36500 | 0.3671 | 0.6361 | 0.5934 | 0.6140 | 0.9631 |
- | 0.0003 | 21.5367 | 37000 | 0.3706 | 0.6269 | 0.6029 | 0.6147 | 0.9632 |
- | 0.0003 | 21.8277 | 37500 | 0.3645 | 0.6128 | 0.6080 | 0.6104 | 0.9628 |
- | 0.0004 | 22.1187 | 38000 | 0.3660 | 0.6157 | 0.6099 | 0.6128 | 0.9628 |
- | 0.0003 | 22.4098 | 38500 | 0.3625 | 0.6214 | 0.6054 | 0.6133 | 0.9633 |
- | 0.0003 | 22.7008 | 39000 | 0.3602 | 0.6125 | 0.6109 | 0.6117 | 0.9631 |
- | 0.0003 | 22.9919 | 39500 | 0.3692 | 0.6196 | 0.5931 | 0.6061 | 0.9630 |
- | 0.0001 | 23.2829 | 40000 | 0.3713 | 0.6226 | 0.6035 | 0.6129 | 0.9631 |
- | 0.0002 | 23.5739 | 40500 | 0.3730 | 0.6215 | 0.6022 | 0.6117 | 0.9630 |
+ | Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
+ |:-------------:|:------:|:-----:|:---------------:|:---------:|:------:|:------:|:--------:|
+ | 0.3006 | 0.2910 | 500 | 0.2357 | 0.3668 | 0.2130 | 0.2695 | 0.9352 |
+ | 0.1928 | 0.5821 | 1000 | 0.1875 | 0.4361 | 0.3503 | 0.3885 | 0.9462 |
+ | 0.1489 | 0.8731 | 1500 | 0.1576 | 0.4740 | 0.4823 | 0.4782 | 0.9525 |
+ | 0.1075 | 1.1641 | 2000 | 0.1615 | 0.5266 | 0.4835 | 0.5041 | 0.9564 |
+ | 0.0865 | 1.4552 | 2500 | 0.1481 | 0.5230 | 0.5670 | 0.5441 | 0.9570 |
+ | 0.0797 | 1.7462 | 3000 | 0.1483 | 0.5544 | 0.5787 | 0.5663 | 0.9586 |
+ | 0.0709 | 2.0373 | 3500 | 0.1547 | 0.5766 | 0.5763 | 0.5764 | 0.9603 |
+ | 0.0471 | 2.3283 | 4000 | 0.1697 | 0.5880 | 0.5657 | 0.5767 | 0.9610 |
+ | 0.0456 | 2.6193 | 4500 | 0.1599 | 0.5935 | 0.6014 | 0.5974 | 0.9611 |
+ | 0.0453 | 2.9104 | 5000 | 0.1579 | 0.5927 | 0.6063 | 0.5994 | 0.9623 |
+ | 0.0312 | 3.2014 | 5500 | 0.1729 | 0.6139 | 0.6103 | 0.6121 | 0.9630 |
+ | 0.0269 | 3.4924 | 6000 | 0.1828 | 0.5998 | 0.6096 | 0.6047 | 0.9619 |
+ | 0.0274 | 3.7835 | 6500 | 0.1853 | 0.6104 | 0.5940 | 0.6021 | 0.9626 |
+ | 0.0245 | 4.0745 | 7000 | 0.1989 | 0.5772 | 0.5986 | 0.5877 | 0.9609 |
+ | 0.0153 | 4.3655 | 7500 | 0.2130 | 0.6058 | 0.6055 | 0.6057 | 0.9623 |
+ | 0.0174 | 4.6566 | 8000 | 0.1978 | 0.6002 | 0.6005 | 0.6004 | 0.9620 |
+ | 0.0179 | 4.9476 | 8500 | 0.2038 | 0.5669 | 0.6331 | 0.5982 | 0.9610 |
+ | 0.0113 | 5.2386 | 9000 | 0.2109 | 0.5876 | 0.6327 | 0.6093 | 0.9617 |
+ | 0.0104 | 5.5297 | 9500 | 0.2223 | 0.5911 | 0.6080 | 0.5994 | 0.9622 |
+ | 0.0115 | 5.8207 | 10000 | 0.2274 | 0.6097 | 0.6011 | 0.6053 | 0.9619 |
+ | 0.0098 | 6.1118 | 10500 | 0.2458 | 0.6230 | 0.5838 | 0.6028 | 0.9623 |
+ | 0.0067 | 6.4028 | 11000 | 0.2512 | 0.6039 | 0.6001 | 0.6020 | 0.9625 |
+ | 0.0077 | 6.6938 | 11500 | 0.2492 | 0.6103 | 0.5977 | 0.6039 | 0.9625 |
+ | 0.0079 | 6.9849 | 12000 | 0.2522 | 0.6054 | 0.6019 | 0.6037 | 0.9615 |
+ | 0.0054 | 7.2759 | 12500 | 0.2577 | 0.5999 | 0.6168 | 0.6082 | 0.9618 |
+ | 0.0056 | 7.5669 | 13000 | 0.2564 | 0.6114 | 0.6025 | 0.6069 | 0.9625 |
+ | 0.0058 | 7.8580 | 13500 | 0.2621 | 0.6105 | 0.5881 | 0.5991 | 0.9611 |
+ | 0.0051 | 8.1490 | 14000 | 0.2644 | 0.6069 | 0.6051 | 0.6060 | 0.9623 |
+ | 0.0041 | 8.4400 | 14500 | 0.2655 | 0.5858 | 0.6115 | 0.5983 | 0.9607 |
+ | 0.0045 | 8.7311 | 15000 | 0.2714 | 0.6130 | 0.6035 | 0.6082 | 0.9629 |
+ | 0.0041 | 9.0221 | 15500 | 0.2741 | 0.5988 | 0.6128 | 0.6057 | 0.9621 |


  ### Framework versions
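The card diff above only reports metrics. For readers who want to try the checkpoint, here is a minimal sketch of loading it as a token-classification (NER) pipeline with transformers; the hub repo id is inferred from the committer name and the model-index entry and is an assumption, as is the example sentence.

```python
# Minimal sketch: load the fine-tuned mDeBERTa NER checkpoint for inference.
# The repo id is an assumption pieced together from this commit's metadata.
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

repo_id = "haryoaw/scenario-non-kd-scr-ner-full-mdeberta_data-univner_full55"  # assumed hub id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForTokenClassification.from_pretrained(repo_id)

ner = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",  # merge word pieces into whole entity spans
)
print(ner("Barack Obama visited Jakarta in 2010."))
```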
eval_result_ner.json CHANGED
@@ -1 +1 @@
- {"ceb_gja": {"precision": 0.26785714285714285, "recall": 0.6122448979591837, "f1": 0.3726708074534162, "accuracy": 0.9173745173745174}, "en_pud": {"precision": 0.5027322404371585, "recall": 0.42790697674418604, "f1": 0.4623115577889447, "accuracy": 0.9496127691726483}, "de_pud": {"precision": 0.13111962537249894, "recall": 0.2964388835418672, "f1": 0.18181818181818182, "accuracy": 0.8523744784585814}, "pt_pud": {"precision": 0.5862068965517241, "recall": 0.556869881710646, "f1": 0.5711619225384974, "accuracy": 0.9611654633229376}, "ru_pud": {"precision": 0.01607084289931125, "recall": 0.0472972972972973, "f1": 0.023990208078335372, "accuracy": 0.7374838543012141}, "sv_pud": {"precision": 0.5384615384615384, "recall": 0.36054421768707484, "f1": 0.4318975552968568, "accuracy": 0.9472111553784861}, "tl_trg": {"precision": 0.2727272727272727, "recall": 0.6521739130434783, "f1": 0.38461538461538464, "accuracy": 0.9373297002724795}, "tl_ugnayan": {"precision": 0.07, "recall": 0.21212121212121213, "f1": 0.10526315789473686, "accuracy": 0.8842297174111212}, "zh_gsd": {"precision": 0.5868421052631579, "recall": 0.5814863102998696, "f1": 0.5841519318926, "accuracy": 0.9460539460539461}, "zh_gsdsimp": {"precision": 0.59254327563249, "recall": 0.583224115334207, "f1": 0.5878467635402906, "accuracy": 0.944971694971695}, "hr_set": {"precision": 0.7613314447592068, "recall": 0.7662152530292231, "f1": 0.7637655417406749, "accuracy": 0.9730008244023083}, "da_ddt": {"precision": 0.6598984771573604, "recall": 0.5816554809843401, "f1": 0.6183115338882283, "accuracy": 0.9730619574977551}, "en_ewt": {"precision": 0.6018614270941055, "recall": 0.5349264705882353, "f1": 0.5664233576642335, "accuracy": 0.959716300753078}, "pt_bosque": {"precision": 0.6563285834031852, "recall": 0.6444444444444445, "f1": 0.6503322259136213, "accuracy": 0.9680118823358933}, "sr_set": {"precision": 0.8122786304604487, "recall": 0.8122786304604487, "f1": 0.8122786304604487, "accuracy": 0.9725943437527362}, "sk_snk": {"precision": 0.4213740458015267, "recall": 0.3016393442622951, "f1": 0.3515923566878981, "accuracy": 0.9186557788944724}, "sv_talbanken": {"precision": 0.7159090909090909, "recall": 0.6428571428571429, "f1": 0.6774193548387097, "accuracy": 0.9941110075084654}}
+ {"ceb_gja": {"precision": 0.35802469135802467, "recall": 0.5918367346938775, "f1": 0.4461538461538461, "accuracy": 0.9420849420849421}, "en_pud": {"precision": 0.47845303867403316, "recall": 0.4027906976744186, "f1": 0.4373737373737374, "accuracy": 0.9488099735549679}, "de_pud": {"precision": 0.11877828054298642, "recall": 0.30317613089509143, "f1": 0.17068545109726357, "accuracy": 0.8338568280905724}, "pt_pud": {"precision": 0.584958217270195, "recall": 0.5732484076433121, "f1": 0.579044117647059, "accuracy": 0.9621480753620711}, "ru_pud": {"precision": 0.01828110161443495, "recall": 0.07432432432432433, "f1": 0.02934451219512195, "accuracy": 0.6327563936967192}, "sv_pud": {"precision": 0.5242566510172144, "recall": 0.32555879494655005, "f1": 0.40167865707434053, "accuracy": 0.9453239672887398}, "tl_trg": {"precision": 0.20967741935483872, "recall": 0.5652173913043478, "f1": 0.30588235294117644, "accuracy": 0.9237057220708447}, "tl_ugnayan": {"precision": 0.05263157894736842, "recall": 0.12121212121212122, "f1": 0.07339449541284404, "accuracy": 0.9051959890610757}, "zh_gsd": {"precision": 0.5702179176755447, "recall": 0.6140808344198174, "f1": 0.591337099811676, "accuracy": 0.9468864468864469}, "zh_gsdsimp": {"precision": 0.5624227441285538, "recall": 0.5963302752293578, "f1": 0.5788804071246818, "accuracy": 0.9459706959706959}, "hr_set": {"precision": 0.7391304347826086, "recall": 0.7633642195295794, "f1": 0.7510518934081346, "accuracy": 0.9725474031327288}, "da_ddt": {"precision": 0.6384615384615384, "recall": 0.5570469798657718, "f1": 0.5949820788530465, "accuracy": 0.9722637932754664}, "en_ewt": {"precision": 0.6191489361702127, "recall": 0.5349264705882353, "f1": 0.57396449704142, "accuracy": 0.9612304259473243}, "pt_bosque": {"precision": 0.6318367346938776, "recall": 0.6370370370370371, "f1": 0.6344262295081967, "accuracy": 0.9668164034197942}, "sr_set": {"precision": 0.7838452787258248, "recall": 0.8134592680047226, "f1": 0.7983777520278099, "accuracy": 0.9711058576306804}, "sk_snk": {"precision": 0.4550989345509893, "recall": 0.326775956284153, "f1": 0.38040712468193383, "accuracy": 0.9251727386934674}, "sv_talbanken": {"precision": 0.6685393258426966, "recall": 0.6071428571428571, "f1": 0.6363636363636364, "accuracy": 0.9935711831967414}}
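The evaluation file is a flat JSON object keyed by treebank (e.g. `en_pud`, `zh_gsd`); each value holds precision, recall, f1, and accuracy. A small sketch for loading and summarizing it follows; the macro-F1 aggregation is illustrative, not something the repo itself computes.

```python
# Minimal sketch: rank the per-treebank NER scores in eval_result_ner.json
# and compute an unweighted (macro) average F1 across treebanks.
import json

with open("eval_result_ner.json") as fh:
    results = json.load(fh)

ranked = sorted(results.items(), key=lambda kv: kv[1]["f1"], reverse=True)
for name, scores in ranked:
    print(f"{name:15s} f1={scores['f1']:.4f} "
          f"p={scores['precision']:.4f} r={scores['recall']:.4f}")

macro_f1 = sum(s["f1"] for s in results.values()) / len(results)
print(f"macro-F1 over {len(results)} treebanks: {macro_f1:.4f}")
```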
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:94c798d33249bd160639357d57ccdb1a59f3a5b1a1298ea3c4b88c466cb4bcc5
+ oid sha256:3ed1fa0e46fdb82c0c49bf7037b3a9b98bed6b1cc822ded7f0e5c6de5992571a
  size 942800188
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0660f7adcec985c2b6cc9209ba76316087b5b519afd5f5266cf9963ef1d585e2
+ oid sha256:3e6d85b99c71e32ccec3552dc8e029f4fd58d303a617fd5a8702630e82a81fbe
  size 5304
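The model.safetensors and training_args.bin entries above are Git LFS pointer files: only the sha256 oid and byte size are versioned in the commit, while the blobs live in LFS storage. A short sketch (the local file path is an assumption) for verifying a downloaded blob against its pointer:

```python
# Minimal sketch: check a downloaded blob against the "oid sha256:<hash>" and
# "size <bytes>" fields of its Git LFS pointer.
import hashlib
import os

def verify_lfs_blob(blob_path: str, expected_sha256: str, expected_size: int) -> bool:
    """Return True if the file matches the size and sha256 recorded in the pointer."""
    if os.path.getsize(blob_path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(blob_path, "rb") as fh:
        for chunk in iter(lambda: fh.read(1 << 20), b""):  # read in 1 MiB chunks
            digest.update(chunk)
    return digest.hexdigest() == expected_sha256

# Values taken from the new model.safetensors pointer in this commit.
print(verify_lfs_blob(
    "model.safetensors",  # assumed local path of the downloaded weights
    "3ed1fa0e46fdb82c0c49bf7037b3a9b98bed6b1cc822ded7f0e5c6de5992571a",
    942800188,
))
```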