asahi417 committed 5a51baf (1 parent: bfa4ae1)

model update

config.json CHANGED
@@ -1,5 +1,5 @@
  {
- "_name_or_path": "tner_ckpt/wnut2017_deberta_v3_large/best_model",
+ "_name_or_path": "tner_ckpt/wnut2017_deberta_large/model_ulfllg/epoch_5",
  "architectures": [
  "DebertaV2ForTokenClassification"
  ],
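The architecture is unchanged (DebertaV2ForTokenClassification); only the recorded checkpoint path was updated. A minimal inference sketch, assuming the model is published on the Hub (the repository id below is a placeholder, substitute this repository's actual name):

    from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline

    # Placeholder repository id; replace with this model's actual Hub name.
    MODEL_ID = "tner/deberta-v3-large-wnut2017"

    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
    model = AutoModelForTokenClassification.from_pretrained(MODEL_ID)

    # Group sub-word predictions back into entity spans.
    ner = pipeline("token-classification", model=model, tokenizer=tokenizer,
                   aggregation_strategy="simple")
    print(ner("Jacob Collier is a Grammy awarded artist from London."))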
eval/metric.json ADDED
@@ -0,0 +1 @@
+ {"micro/f1": 0.5047353760445682, "micro/f1_ci": {"90": [0.4752384997212858, 0.5329114690850492], "95": [0.46929053844001617, 0.537282841423422]}, "micro/recall": 0.4198331788693234, "micro/precision": 0.63268156424581, "macro/f1": 0.4165125500830091, "macro/f1_ci": {"90": [0.38706770098595766, 0.4460817969731918], "95": [0.3822097912028253, 0.449996955217276]}, "macro/recall": 0.3573954549633822, "macro/precision": 0.5356144444686111, "per_entity_metric": {"corporation": {"f1": 0.25477707006369427, "f1_ci": {"90": [0.1783308611813978, 0.3278688524590164], "95": [0.1621522257551669, 0.34150671092509755]}, "precision": 0.21978021978021978, "recall": 0.30303030303030304}, "group": {"f1": 0.34309623430962344, "f1_ci": {"90": [0.2621295628948149, 0.4177979797979797], "95": [0.24770188027977105, 0.43197013405735635]}, "precision": 0.5540540540540541, "recall": 0.24848484848484848}, "location": {"f1": 0.6187050359712232, "f1_ci": {"90": [0.5522723993651708, 0.6811594202898551], "95": [0.5407357357357359, 0.6968641114982578]}, "precision": 0.671875, "recall": 0.5733333333333334}, "person": {"f1": 0.6721763085399448, "f1_ci": {"90": [0.632193895384893, 0.7077770054832908], "95": [0.6262292487956584, 0.7144847678929487]}, "precision": 0.8215488215488216, "recall": 0.5687645687645687}, "product": {"f1": 0.18579234972677597, "f1_ci": {"90": [0.11764705882352942, 0.250065445026178], "95": [0.10585157318741445, 0.26747624406101533]}, "precision": 0.30357142857142855, "recall": 0.13385826771653545}, "work_of_art": {"f1": 0.42452830188679247, "f1_ci": {"90": [0.3559713745778989, 0.49746445437785686], "95": [0.3403931544865864, 0.5150418503485911]}, "precision": 0.6428571428571429, "recall": 0.31690140845070425}}}
eval/metric_span.json ADDED
@@ -0,0 +1 @@
+ {"micro/f1": 0.6249999999999999, "micro/f1_ci": {"90": [0.5977164217334618, 0.6503884931572228], "95": [0.5920284319228305, 0.6550446185614259]}, "micro/recall": 0.5143651529193698, "micro/precision": 0.7962697274031564, "macro/f1": 0.6249999999999999, "macro/f1_ci": {"90": [0.5977164217334618, 0.6503884931572228], "95": [0.5920284319228305, 0.6550446185614259]}, "macro/recall": 0.5143651529193698, "macro/precision": 0.7962697274031564}
eval/prediction.validation.json ADDED
The diff for this file is too large to render.
 
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:cd25ecf2f9de84df2f277628834ec098f8598b173ed5b3f83ddc37e0ec69695c
- size 1736233903
+ oid sha256:b8e660f4e6df99d5522db22b282d4ee5909a0486e908f1929f93312f00705206
+ size 1736239407
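Only the LFS pointer changes here; the weights themselves live in LFS storage. A quick way to confirm that a locally downloaded pytorch_model.bin matches this commit is to compare its size and SHA-256 with the pointer above, for example:

    import hashlib

    # oid/size copied from the new LFS pointer in this commit.
    EXPECTED_SHA256 = "b8e660f4e6df99d5522db22b282d4ee5909a0486e908f1929f93312f00705206"
    EXPECTED_SIZE = 1736239407

    digest, size = hashlib.sha256(), 0
    with open("pytorch_model.bin", "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
            size += len(chunk)

    assert size == EXPECTED_SIZE, f"size mismatch: {size}"
    assert digest.hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
    print("pytorch_model.bin matches the LFS pointer")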
tokenizer_config.json CHANGED
@@ -4,7 +4,7 @@
  "do_lower_case": false,
  "eos_token": "[SEP]",
  "mask_token": "[MASK]",
- "name_or_path": "tner_ckpt/wnut2017_deberta_v3_large/best_model",
+ "name_or_path": "tner_ckpt/wnut2017_deberta_large/model_ulfllg/epoch_5",
  "pad_token": "[PAD]",
  "sep_token": "[SEP]",
  "sp_model_kwargs": {},
trainer_config.json ADDED
@@ -0,0 +1 @@
+ {"dataset": ["tner/wnut2017"], "dataset_split": "train", "dataset_name": null, "local_dataset": null, "model": "microsoft/deberta-v3-large", "crf": false, "max_length": 128, "epoch": 15, "batch_size": 16, "lr": 1e-05, "random_seed": 42, "gradient_accumulation_steps": 4, "weight_decay": 1e-07, "lr_warmup_step_ratio": 0.1, "max_grad_norm": 10.0}