{
  "drop_rate": 0.1,
  "freeze_RoBERTa": false,
  "freeze_ViT": false,
  "freeze_layer_count_roberta": false,
  "freeze_layer_count_vit": false,
  "head_hidden_scale": 2,
  "hidden_act": "gelu",
  "hidden_size": 768,
  "image_size": 288,
  "input_text_embed_size": 768,
  "input_image_embed_size": 768,
  "is_encoder_decoder": false,
  "layer_norm_eps": 1e-5,
  "link_tower_shared": false,
  "link_tower_type": "add",
  "max_text_len": 50,
  "mlp_ratio": 4,
  "num_attention_heads": 12,
  "num_hidden_layers": 6,
  "resolution_before": 224,
  "stop_gradient": false,
  "task_head_layers": 2,
  "tie_word_embeddings": false,
  "tokenizer": "roberta-base",
  "unfreeze_RoBERTa_attention": false,
  "unfreeze_RoBERTa_embeddings": false,
  "unfreeze_RoBERTa_encoder": false,
  "unfreeze_RoBERTa_layernorm": false,
  "unfreeze_ViT_attention": false,
  "unfreeze_ViT_layernorm": false,
  "vit_embed_dim": 512,
  "vit_layers": 12,
  "vit_layernorm_init_from_vit": false,
  "vit_layernorm_shared": true,
  "vit_patch_size": 16,
  "vit_remove_last": false,
  "vit_transformer_width": 512,
  "vit_width": 768,
  "vocab_size": 50265
}
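
A minimal sketch of reading this configuration, assuming the JSON above is saved locally as "config.json"; it uses only Python's standard json module, and the derived quantities shown in the comments follow directly from the values in the file (the variable names are illustrative).

import json

# Load the configuration above (assumes it is saved locally as "config.json").
with open("config.json", "r", encoding="utf-8") as f:
    config = json.load(f)

# Quantities implied by the values in the file:
# each attention head spans hidden_size / num_attention_heads dimensions,
# the feed-forward width is mlp_ratio * hidden_size,
# and the image is split into (image_size / vit_patch_size)^2 patches.
head_dim = config["hidden_size"] // config["num_attention_heads"]      # 768 // 12 = 64
ffn_dim = config["mlp_ratio"] * config["hidden_size"]                  # 4 * 768 = 3072
num_patches = (config["image_size"] // config["vit_patch_size"]) ** 2  # (288 // 16) ** 2 = 324

print(f"per-head dim: {head_dim}, FFN dim: {ffn_dim}, image patches: {num_patches}")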