{
  "architectures": [
    "BertForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "directionality": "bidi",
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "B-AUX",
    "1": "B-DET",
    "2": "I-VERB",
    "3": "AUX",
    "4": "B-SCONJ",
    "5": "ADV",
    "6": "I-PUNCT",
    "7": "I-DET",
    "8": "NUM",
    "9": "B-ADJ",
    "10": "I-ADJ",
    "11": "NOUN",
    "12": "B-PROPN",
    "13": "B-CCONJ",
    "14": "I-ADP",
    "15": "PRON",
    "16": "ADJ",
    "17": "ADP",
    "18": "B-VERB",
    "19": "B-PUNCT",
    "20": "B-PRON",
    "21": "I-PROPN",
    "22": "I-PART",
    "23": "PART",
    "24": "I-NUM",
    "25": "DET",
    "26": "I-CCONJ",
    "27": "B-ADP",
    "28": "PUNCT",
    "29": "B-NOUN",
    "30": "CCONJ",
    "31": "B-NUM",
    "32": "PROPN",
    "33": "I-PRON",
    "34": "SYM",
    "35": "VERB",
    "36": "B-PART",
    "37": "I-AUX",
    "38": "I-NOUN",
    "39": "B-ADV",
    "40": "I-SCONJ",
    "41": "I-ADV"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "ADJ": 16,
    "ADP": 17,
    "ADV": 5,
    "AUX": 3,
    "B-ADJ": 9,
    "B-ADP": 27,
    "B-ADV": 39,
    "B-AUX": 0,
    "B-CCONJ": 13,
    "B-DET": 1,
    "B-NOUN": 29,
    "B-NUM": 31,
    "B-PART": 36,
    "B-PRON": 20,
    "B-PROPN": 12,
    "B-PUNCT": 19,
    "B-SCONJ": 4,
    "B-VERB": 18,
    "CCONJ": 30,
    "DET": 25,
    "I-ADJ": 10,
    "I-ADP": 14,
    "I-ADV": 41,
    "I-AUX": 37,
    "I-CCONJ": 26,
    "I-DET": 7,
    "I-NOUN": 38,
    "I-NUM": 24,
    "I-PART": 22,
    "I-PRON": 33,
    "I-PROPN": 21,
    "I-PUNCT": 6,
    "I-SCONJ": 40,
    "I-VERB": 2,
    "NOUN": 11,
    "NUM": 8,
    "PART": 23,
    "PRON": 15,
    "PROPN": 32,
    "PUNCT": 28,
    "SYM": 34,
    "VERB": 35
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "pooler_fc_size": 768,
  "pooler_num_attention_heads": 12,
  "pooler_num_fc_layers": 3,
  "pooler_size_per_head": 128,
  "pooler_type": "first_token_transform",
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.9.2",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 8506
}
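
For reference, a minimal sketch of how a configuration like this is typically consumed: the checkpoint is loaded with the Transformers `AutoModelForTokenClassification` API (which reads `id2label` from this file), and predicted class indices are mapped back to their BIO/UPOS tag strings. The model path below is a placeholder, not the actual repository id.

```python
# Minimal sketch, not taken from this repository: load the checkpoint and run
# token-level tagging, decoding class ids via config.id2label shown above.
import torch
from transformers import AutoTokenizer, AutoModelForTokenClassification

model_dir = "path/to/model"  # hypothetical path; substitute the real checkpoint or repo id
tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForTokenClassification.from_pretrained(model_dir)

text = "Example sentence"
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, seq_len, 42), one score per label above

pred_ids = logits.argmax(dim=-1)[0].tolist()
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
for token, pred in zip(tokens, pred_ids):
    # id2label maps each class index back to its tag string, e.g. 29 -> "B-NOUN"
    print(token, model.config.id2label[pred])
```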