VLP_singleLED-model / config.json
Quaouar's picture
Update config.json
5d5f75a
raw
history blame
789 Bytes
{
"architectures": [
"VLP_singleLED-model"
],
"attention_probs_dropout_prob": 0.5,
"gradient_checkpointing": false,
"hidden_act": "relu",
"hidden_dropout_prob": 0.5,
"hidden_size": 6272,
"initializer_range": 0.02,
"intermediate_size": 512,
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "RMSprop",
"num_attention_heads": 12,
"num_hidden_layers": 512,
"pad_token_id": 0,
"position_embedding_type": "absolute",
"transformers_version": "4.6.0.dev0",
"type_vocab_size": 2,
"use_cache": true,
"classes": {
"1": "LED11",
"2": "LED12",
"3": "LED13",
"4": "LED14",
"5": "LED3",
"6": "LED4",
"7": "LED7",
"8": "LED8"
},
"num_channels": 3
}