TorchMoji / config.json
Latest commit: "distilbert => bert; distilbert didn't exist" (d53cfe9, verified)
{
  "_name_or_path": "bert-base-uncased",
  "activation": "gelu",
  "architectures": [
    "BertForMultilabelSequenceClassification"
  ],
  "attention_dropout": 0.1,
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "dim": 768,
  "dropout": 0.1,
  "hidden_act": "gelu",
  "hidden_dim": 3072,
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "πŸ˜‚",
    "1": "πŸ˜’",
    "2": "😩",
    "3": "😭",
    "4": "😍",
    "5": "πŸ˜”",
    "6": "πŸ‘Œ",
    "7": "😊",
    "8": "❀",
    "9": "😏",
    "10": "😁",
    "11": "🎢",
    "12": "😳",
    "13": "πŸ’―",
    "14": "😴",
    "15": "😌",
    "16": "☺",
    "17": "πŸ™Œ",
    "18": "πŸ’•",
    "19": "πŸ˜‘",
    "20": "πŸ˜…",
    "21": "πŸ™",
    "22": "πŸ˜•",
    "23": "😘",
    "24": "β™₯",
    "25": "😐",
    "26": "πŸ’",
    "27": "😞",
    "28": "πŸ™ˆ",
    "29": "😫",
    "30": "✌",
    "31": "😎",
    "32": "😑",
    "33": "πŸ‘",
    "34": "😒",
    "35": "πŸ˜ͺ",
    "36": "πŸ˜‹",
    "37": "😀",
    "38": "βœ‹",
    "39": "😷",
    "40": "πŸ‘",
    "41": "πŸ‘€",
    "42": "πŸ”«",
    "43": "😣",
    "44": "😈",
    "45": "πŸ˜“",
    "46": "πŸ’”",
    "47": "β™‘",
    "48": "🎧",
    "49": "πŸ™Š",
    "50": "πŸ˜‰",
    "51": "πŸ’€",
    "52": "πŸ˜–",
    "53": "πŸ˜„",
    "54": "😜",
    "55": "😠",
    "56": "πŸ™…",
    "57": "πŸ’ͺ",
    "58": "πŸ‘Š",
    "59": "πŸ’œ",
    "60": "πŸ’–",
    "61": "πŸ’™",
    "62": "😬",
    "63": "✨"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "πŸ˜‚": "0",
    "πŸ˜’": "1",
    "😩": "2",
    "😭": "3",
    "😍": "4",
    "πŸ˜”": "5",
    "πŸ‘Œ": "6",
    "😊": "7",
    "❀": "8",
    "😏": "9",
    "😁": "10",
    "🎢": "11",
    "😳": "12",
    "πŸ’―": "13",
    "😴": "14",
    "😌": "15",
    "☺": "16",
    "πŸ™Œ": "17",
    "πŸ’•": "18",
    "πŸ˜‘": "19",
    "πŸ˜…": "20",
    "πŸ™": "21",
    "πŸ˜•": "22",
    "😘": "23",
    "β™₯": "24",
    "😐": "25",
    "πŸ’": "26",
    "😞": "27",
    "πŸ™ˆ": "28",
    "😫": "29",
    "✌": "30",
    "😎": "31",
    "😑": "32",
    "πŸ‘": "33",
    "😒": "34",
    "πŸ˜ͺ": "35",
    "πŸ˜‹": "36",
    "😀": "37",
    "βœ‹": "38",
    "😷": "39",
    "πŸ‘": "40",
    "πŸ‘€": "41",
    "πŸ”«": "42",
    "😣": "43",
    "😈": "44",
    "πŸ˜“": "45",
    "πŸ’”": "46",
    "β™‘": "47",
    "🎧": "48",
    "πŸ™Š": "49",
    "πŸ˜‰": "50",
    "πŸ’€": "51",
    "πŸ˜–": "52",
    "πŸ˜„": "53",
    "😜": "54",
    "😠": "55",
    "πŸ™…": "56",
    "πŸ’ͺ": "57",
    "πŸ‘Š": "58",
    "πŸ’œ": "59",
    "πŸ’–": "60",
    "πŸ’™": "61",
    "😬": "62",
    "✨": "63"
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "n_heads": 12,
  "n_layers": 6,
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "qa_dropout": 0.1,
  "seq_classif_dropout": 0.2,
  "sinusoidal_pos_embds": false,
  "tie_weights_": true,
  "torch_dtype": "float32",
  "transformers_version": "4.12.5",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 50000
}
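
The config above describes a BERT-style encoder for multi-label emoji classification: 64 labels, one per emoji, mapped in id2label and label2id. Below is a minimal usage sketch, not code from this repository. It assumes the repo id Pendrokar/TorchMoji (taken from the page title) and substitutes the stock transformers class BertForSequenceClassification with problem_type="multi_label_classification" as a stand-in for the custom BertForMultilabelSequenceClassification named under "architectures"; the example sentence and top-k value are arbitrary.

import torch
from transformers import AutoConfig, AutoTokenizer, BertForSequenceClassification

# Assumed repo id, taken from the page header; adjust to the actual model location.
repo_id = "Pendrokar/TorchMoji"

config = AutoConfig.from_pretrained(repo_id)
# Each of the 64 emoji labels gets an independent sigmoid score.
config.problem_type = "multi_label_classification"

tokenizer = AutoTokenizer.from_pretrained(repo_id)
# Stock class used as a stand-in for the custom multilabel head named in the config.
model = BertForSequenceClassification.from_pretrained(repo_id, config=config)
model.eval()

inputs = tokenizer("I love this song so much", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, 64): one logit per emoji

probs = torch.sigmoid(logits)[0]
top = torch.topk(probs, k=5)
for score, idx in zip(top.values.tolist(), top.indices.tolist()):
    # config.id2label maps each class index back to its emoji.
    print(config.id2label[idx], round(score, 3))

Because the task is multi-label, scores are read through independent sigmoids rather than a softmax over the 64 classes, so several emoji can score highly for the same sentence.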