asahi417 committed on
Commit
49c8f35
1 Parent(s): 0f0b045

model update

Browse files
Files changed (3) hide show
  1. config.json +1 -1
  2. pytorch_model.bin +2 -2
  3. tokenizer_config.json +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "cner_output/model/baseline_2021/roberta_base_dec2021/best_model",
3
  "architectures": [
4
  "RobertaForTokenClassification"
5
  ],
 
1
  {
2
+ "_name_or_path": "cner_output/model/baseline_2021/roberta_base_dec2021/model_mwzvua/epoch_10",
3
  "architectures": [
4
  "RobertaForTokenClassification"
5
  ],
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:cbd7a0b660a7c9a9d93a910fa12561928514631a3d3e55101203302f60e36cee
3
- size 496349169
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cd3be503a049f76525c5caea00945de8dabb19367e2aedfbfd4e656fab0b02b6
3
+ size 496351921
tokenizer_config.json CHANGED
@@ -1 +1 @@
1
- {"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "cner_output/model/baseline_2021/roberta_base_dec2021/best_model", "tokenizer_class": "RobertaTokenizer"}
 
1
+ {"errors": "replace", "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "add_prefix_space": false, "trim_offsets": true, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "cner_output/model/baseline_2021/roberta_base_dec2021/model_mwzvua/epoch_10", "tokenizer_class": "RobertaTokenizer"}