{
  "_name_or_path": "AutoTrain",
  "_num_labels": 58,
  "architectures": [
    "RobertaForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "id2label": {
    "0": "\uac00\ub09c\ud55c, \ubd88\uc6b0\ud55c",
    "1": "\uac10\uc0ac\ud558\ub294",
    "2": "\uac71\uc815\uc2a4\ub7ec\uc6b4",
    "3": "\uace0\ub9bd\ub41c",
    "4": "\uad34\ub85c\uc6cc\ud558\ub294",
    "5": "\uad6c\uc5ed\uc9c8 \ub098\ub294",
    "6": "\uae30\uc068",
    "7": "\ub099\ub2f4\ud55c",
    "8": "\ub0a8\uc758 \uc2dc\uc120\uc744 \uc758\uc2dd\ud558\ub294",
    "9": "\ub178\uc5ec\uc6cc\ud558\ub294",
    "10": "\ub208\ubb3c\uc774 \ub098\ub294",
    "11": "\ub290\uae0b",
    "12": "\ub2f9\ud639\uc2a4\ub7ec\uc6b4",
    "13": "\ub2f9\ud669",
    "14": "\ub450\ub824\uc6b4",
    "15": "\ub9c8\ube44\ub41c",
    "16": "\ub9cc\uc871\uc2a4\ub7ec\uc6b4",
    "17": "\ubc29\uc5b4\uc801\uc778",
    "18": "\ubc30\uc2e0\ub2f9\ud55c",
    "19": "\ubc84\ub824\uc9c4",
    "20": "\ubd80\ub044\ub7ec\uc6b4",
    "21": "\ubd84\ub178",
    "22": "\ubd88\uc548",
    "23": "\ube44\ud1b5\ud55c",
    "24": "\uc0c1\ucc98",
    "25": "\uc131\uac00\uc2e0",
    "26": "\uc2a4\ud2b8\ub808\uc2a4 \ubc1b\ub294",
    "27": "\uc2ac\ud514",
    "28": "\uc2e0\ub8b0\ud558\ub294",
    "29": "\uc2e0\uc774 \ub09c",
    "30": "\uc2e4\ub9dd\ud55c",
    "31": "\uc545\uc758\uc801\uc778",
    "32": "\uc548\ub2ec\ud558\ub294",
    "33": "\uc548\ub3c4",
    "34": "\uc5b5\uc6b8\ud55c",
    "35": "\uc5f4\ub4f1\uac10",
    "36": "\uc5fc\uc138\uc801\uc778",
    "37": "\uc678\ub85c\uc6b4",
    "38": "\uc6b0\uc6b8\ud55c",
    "39": "\uc790\uc2e0\ud558\ub294",
    "40": "\uc870\uc2ec\uc2a4\ub7ec\uc6b4",
    "41": "\uc88c\uc808\ud55c",
    "42": "\uc8c4\ucc45\uac10\uc758",
    "43": "\uc9c8\ud22c\ud558\ub294",
    "44": "\uc9dc\uc99d\ub0b4\ub294",
    "45": "\ucd08\uc870\ud55c",
    "46": "\ucda9\uaca9 \ubc1b\uc740",
    "47": "\ucde8\uc57d\ud55c",
    "48": "\ud234\ud234\ub300\ub294",
    "49": "\ud3b8\uc548\ud55c",
    "50": "\ud55c\uc2ec\ud55c",
    "51": "\ud610\uc624\uc2a4\ub7ec\uc6b4",
    "52": "\ud63c\ub780\uc2a4\ub7ec\uc6b4",
    "53": "\ud658\uba78\uc744 \ub290\ub07c\ub294",
    "54": "\ud68c\uc758\uc801\uc778",
    "55": "\ud6c4\ud68c\ub418\ub294",
    "56": "\ud765\ubd84",
    "57": "\ud76c\uc0dd\ub41c"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "\uac00\ub09c\ud55c, \ubd88\uc6b0\ud55c": 0,
    "\uac10\uc0ac\ud558\ub294": 1,
    "\uac71\uc815\uc2a4\ub7ec\uc6b4": 2,
    "\uace0\ub9bd\ub41c": 3,
    "\uad34\ub85c\uc6cc\ud558\ub294": 4,
    "\uad6c\uc5ed\uc9c8 \ub098\ub294": 5,
    "\uae30\uc068": 6,
    "\ub099\ub2f4\ud55c": 7,
    "\ub0a8\uc758 \uc2dc\uc120\uc744 \uc758\uc2dd\ud558\ub294": 8,
    "\ub178\uc5ec\uc6cc\ud558\ub294": 9,
    "\ub208\ubb3c\uc774 \ub098\ub294": 10,
    "\ub290\uae0b": 11,
    "\ub2f9\ud639\uc2a4\ub7ec\uc6b4": 12,
    "\ub2f9\ud669": 13,
    "\ub450\ub824\uc6b4": 14,
    "\ub9c8\ube44\ub41c": 15,
    "\ub9cc\uc871\uc2a4\ub7ec\uc6b4": 16,
    "\ubc29\uc5b4\uc801\uc778": 17,
    "\ubc30\uc2e0\ub2f9\ud55c": 18,
    "\ubc84\ub824\uc9c4": 19,
    "\ubd80\ub044\ub7ec\uc6b4": 20,
    "\ubd84\ub178": 21,
    "\ubd88\uc548": 22,
    "\ube44\ud1b5\ud55c": 23,
    "\uc0c1\ucc98": 24,
    "\uc131\uac00\uc2e0": 25,
    "\uc2a4\ud2b8\ub808\uc2a4 \ubc1b\ub294": 26,
    "\uc2ac\ud514": 27,
    "\uc2e0\ub8b0\ud558\ub294": 28,
    "\uc2e0\uc774 \ub09c": 29,
    "\uc2e4\ub9dd\ud55c": 30,
    "\uc545\uc758\uc801\uc778": 31,
    "\uc548\ub2ec\ud558\ub294": 32,
    "\uc548\ub3c4": 33,
    "\uc5b5\uc6b8\ud55c": 34,
    "\uc5f4\ub4f1\uac10": 35,
    "\uc5fc\uc138\uc801\uc778": 36,
    "\uc678\ub85c\uc6b4": 37,
    "\uc6b0\uc6b8\ud55c": 38,
    "\uc790\uc2e0\ud558\ub294": 39,
    "\uc870\uc2ec\uc2a4\ub7ec\uc6b4": 40,
    "\uc88c\uc808\ud55c": 41,
    "\uc8c4\ucc45\uac10\uc758": 42,
    "\uc9c8\ud22c\ud558\ub294": 43,
    "\uc9dc\uc99d\ub0b4\ub294": 44,
    "\ucd08\uc870\ud55c": 45,
    "\ucda9\uaca9 \ubc1b\uc740": 46,
    "\ucde8\uc57d\ud55c": 47,
    "\ud234\ud234\ub300\ub294": 48,
    "\ud3b8\uc548\ud55c": 49,
    "\ud55c\uc2ec\ud55c": 50,
    "\ud610\uc624\uc2a4\ub7ec\uc6b4": 51,
    "\ud63c\ub780\uc2a4\ub7ec\uc6b4": 52,
    "\ud658\uba78\uc744 \ub290\ub07c\ub294": 53,
    "\ud68c\uc758\uc801\uc778": 54,
    "\ud6c4\ud68c\ub418\ub294": 55,
    "\ud765\ubd84": 56,
    "\ud76c\uc0dd\ub41c": 57
  },
  "layer_norm_eps": 1e-05,
  "max_length": 192,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "pad_token_id": 1,
  "padding": "max_length",
  "position_embedding_type": "absolute",
  "problem_type": "single_label_classification",
  "tokenizer_class": "BertTokenizer",
  "torch_dtype": "float32",
  "transformers_version": "4.29.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 39255
}
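For reference, a minimal inference sketch built only from the settings in this config (58 Korean emotion labels, single-label classification, max_length 192 with max_length padding) is shown below. The model path and the example sentence are placeholders, not values taken from this config; they assume the checkpoint that produced this file is available locally or on the Hugging Face Hub.

    # Minimal sketch, assuming the checkpoint matching this config is available.
    # "path/to/your-autotrain-model" and the sample sentence are placeholders.
    import torch
    from transformers import AutoTokenizer, AutoModelForSequenceClassification

    model_path = "path/to/your-autotrain-model"  # local directory or Hub repo id

    # AutoTokenizer resolves to BertTokenizer via "tokenizer_class" in the config.
    tokenizer = AutoTokenizer.from_pretrained(model_path)
    model = AutoModelForSequenceClassification.from_pretrained(model_path)
    model.eval()

    text = "오늘은 정말 기분이 좋아요"  # placeholder Korean sentence
    inputs = tokenizer(
        text,
        truncation=True,
        padding="max_length",  # matches "padding" in the config
        max_length=192,        # matches "max_length" in the config
        return_tensors="pt",
    )

    with torch.no_grad():
        logits = model(**inputs).logits  # shape: (1, 58)

    pred_id = logits.argmax(dim=-1).item()
    # id2label keys are strings in config.json; transformers converts them to ints on load.
    print(pred_id, model.config.id2label[pred_id])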