{
"_name_or_path": "deberta-v3-large-bionlp2004",
"architectures": [
"DebertaV2ForTokenClassification"
],
"attention_probs_dropout_prob": 0.1,
"crf_state_dict": {
"_constraint_mask": [
[
1.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
0.0,
1.0
],
[
1.0,
1.0,
0.0,
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
0.0,
1.0
],
[
1.0,
1.0,
0.0,
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
0.0,
1.0
],
[
1.0,
1.0,
0.0,
1.0,
0.0,
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
0.0,
1.0
],
[
1.0,
1.0,
0.0,
1.0,
0.0,
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
0.0,
1.0
],
[
1.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0,
1.0,
1.0,
0.0,
0.0,
1.0
],
[
1.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0,
1.0,
1.0,
0.0,
0.0,
1.0
],
[
1.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
0.0,
0.0
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
]
],
"end_transitions": [
-0.7466364502906799,
-1.5597225427627563,
1.3677676916122437,
-1.9478734731674194,
-0.0943344309926033,
-1.481752872467041,
0.04468855261802673,
0.061569295823574066,
0.5608620643615723,
1.9259264469146729,
0.11314122378826141
],
"start_transitions": [
-0.09945586323738098,
-1.850279450416565,
0.31994107365608215,
0.8134310245513916,
-1.1494208574295044,
-1.4835879802703857,
-0.3574755787849426,
-1.2984637022018433,
-0.9591480493545532,
0.18245083093643188,
-0.23107917606830597
],
"transitions": [
[
-0.16309909522533417,
-0.5502283573150635,
-0.3180482089519501,
0.15801210701465607,
0.1563963145017624,
0.4807545840740204,
0.18062275648117065,
0.42859020829200745,
0.18801428377628326,
0.16369004547595978,
0.012216882780194283
],
[
0.2011071890592575,
-0.31737032532691956,
-0.054657403379678726,
-0.14687399566173553,
0.10582231730222702,
0.42548516392707825,
0.04223101586103439,
-0.22696508467197418,
-0.19285674393177032,
-0.045819517225027084,
0.5714722871780396
],
[
-0.2141653150320053,
0.43638932704925537,
0.07841236889362335,
-0.3898158073425293,
-0.461143434047699,
0.251681387424469,
-0.051315125077962875,
0.432632178068161,
0.27035513520240784,
-0.25483834743499756,
-0.25554975867271423
],
[
0.10120851546525955,
-0.588416576385498,
-0.08458846062421799,
-0.3802986145019531,
-0.0005335402674973011,
-0.01627517119050026,
-0.16827818751335144,
-0.4539572596549988,
0.09900970011949539,
0.12087426334619522,
-0.524318277835846
],
[
-0.07068424671888351,
-0.08015620708465576,
0.09249727427959442,
-0.033255185931921005,
-0.28505897521972656,
-0.026000550016760826,
0.04233122617006302,
-0.02459520660340786,
-0.12352070212364197,
-0.3135007917881012,
-0.2119021862745285
],
[
0.1064605638384819,
0.03908783942461014,
0.10062617063522339,
-0.13202111423015594,
-0.12569193542003632,
-0.47523266077041626,
0.026902323588728905,
-0.014480818063020706,
0.5379728674888611,
-0.6075968146324158,
-0.27925530076026917
],
[
0.30935919284820557,
0.07643910497426987,
0.5628833174705505,
0.30425673723220825,
-0.01219983957707882,
-0.426324725151062,
-0.4403841495513916,
0.18429414927959442,
-0.2356564998626709,
-0.004185007885098457,
0.6030216217041016
],
[
0.2363874614238739,
-0.28106313943862915,
-0.20492428541183472,
-0.004136255476623774,
0.289993017911911,
-0.13118140399456024,
-0.10084224492311478,
0.8562389612197876,
-0.22430521249771118,
0.3383093774318695,
-0.28914186358451843
],
[
0.11979195475578308,
-0.11795517057180405,
-0.5248165726661682,
0.09150447696447372,
0.5337616801261902,
0.44269436597824097,
-0.4343547821044922,
0.17205286026000977,
-0.31054869294166565,
0.38092467188835144,
0.2918209433555603
],
[
0.3333682715892792,
-0.41180089116096497,
-0.33359599113464355,
0.4375508725643158,
-0.026997027918696404,
0.17577354609966278,
0.3142207860946655,
-0.07492615282535553,
-0.25847935676574707,
0.48999741673469543,
-0.16481833159923553
],
[
-0.18405911326408386,
0.2880489230155945,
0.0862211287021637,
-0.39804908633232117,
0.2513324022293091,
0.30213290452957153,
0.1442955583333969,
-0.10511966794729233,
-0.37969374656677246,
0.14425943791866302,
-0.26886945962905884
]
]
},
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 1024,
"id2label": {
"0": "O",
"1": "B-DNA",
"2": "I-DNA",
"3": "B-protein",
"4": "I-protein",
"5": "B-cell_type",
"6": "I-cell_type",
"7": "B-cell_line",
"8": "I-cell_line",
"9": "B-RNA",
"10": "I-RNA"
},
"initializer_range": 0.02,
"intermediate_size": 4096,
"label2id": {
"B-DNA": 1,
"B-RNA": 9,
"B-cell_line": 7,
"B-cell_type": 5,
"B-protein": 3,
"I-DNA": 2,
"I-RNA": 10,
"I-cell_line": 8,
"I-cell_type": 6,
"I-protein": 4,
"O": 0
},
"layer_norm_eps": 1e-07,
"max_position_embeddings": 512,
"max_relative_positions": -1,
"model_type": "deberta-v2",
"norm_rel_ebd": "layer_norm",
"num_attention_heads": 16,
"num_hidden_layers": 24,
"pad_token_id": 0,
"pooler_dropout": 0,
"pooler_hidden_act": "gelu",
"pooler_hidden_size": 1024,
"pos_att_type": [
"p2c",
"c2p"
],
"position_biased_input": false,
"position_buckets": 256,
"relative_attention": true,
"share_att_key": true,
"torch_dtype": "float32",
"transformers_version": "4.11.3",
"type_vocab_size": 0,
"vocab_size": 128100
}
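
For reference, a minimal sketch of how a config like this is typically consumed with the Hugging Face transformers library. The model identifier below is an assumption (the config only records "_name_or_path": "deberta-v3-large-bionlp2004"), and the "crf_state_dict" block is an extra field produced by the training framework that created this checkpoint; plain transformers token classification ignores it and decodes per-token argmax labels.

from transformers import AutoConfig, AutoTokenizer, AutoModelForTokenClassification

# Assumed local directory or Hub repo id that holds this config.json and the weights.
model_id = "deberta-v3-large-bionlp2004"

config = AutoConfig.from_pretrained(model_id)
print(config.id2label)  # {0: 'O', 1: 'B-DNA', 2: 'I-DNA', ..., 10: 'I-RNA'}

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

# Per-token argmax decoding over the 11 BioNLP2004 labels; the CRF transition
# parameters stored under "crf_state_dict" are not applied in this path.
text = "Interleukin-2 activates T cells."
inputs = tokenizer(text, return_tensors="pt")
logits = model(**inputs).logits
labels = [config.id2label[i] for i in logits.argmax(-1)[0].tolist()]
print(list(zip(tokenizer.convert_ids_to_tokens(inputs["input_ids"][0]), labels)))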