{
"model_type": "vision-encoder-decoder",
"encoder": {
"model_type": "paligemma",
"architecture": "paligemma-3b",
"image_size": [224, 224],
"input_size": [224, 224],
"num_hidden_layers": 12,
"num_attention_heads": 16,
"hidden_size": 1024,
"patch_size": 16,
"dropout": 0.1
},
"decoder": {
"model_type": "bert",
"vocab_size": 30522,
"max_position_embeddings": 512,
"num_hidden_layers": 12,
"num_attention_heads": 12,
"hidden_size": 768,
"initializer_range": 0.02,
"layer_norm_eps": 1e-12,
"hidden_act": "gelu",
"attention_probs_dropout_prob": 0.1,
"hidden_dropout_prob": 0.1
},
"max_length": 64,
"decoder_start_token_id": 101,
"pad_token_id": 0,
"eos_token_id": 102,
"bos_token_id": 101
}