anahita-b committed
Commit cab3e00
1 parent: b617506

Update config.json

Files changed (1):
  1. config.json (+2, -6)
config.json CHANGED
@@ -4,18 +4,15 @@
  "head_hidden_scale": 2,
  "hidden_act": "gelu",
  "hidden_size": 768,
- "input_image_embed_size": 768,
- "input_text_embed_size": 768,
  "is_encoder_decoder": false,
  "layer_norm_eps": 1e-05,
  "share_link_tower_layers": false,
  "link_tower_type": "add",
- "max_text_len": 50,
- "mlp_ratio": 4,
  "num_attention_heads": 12,
  "num_hidden_layers": 6,
  "tie_word_embeddings": false,
  "text_config_dict": null,
+ "init_layernorm_from_vision_encoder": false,
  "text_config":{
  "architectures": ["BridgeTowerTextModel"],
  "vocab_size": 50265,
@@ -46,7 +43,6 @@
  "image_size": 288,
  "stop_gradient": false,
  "share_layernorm": true,
- "vit_remove_last": false,
- "init_layernorm_from_vit": false
+ "remove_last_layer": false,
  }
  }
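
For reference, the renamed keys match the attribute names used by the BridgeTower configuration classes in transformers. Below is a minimal sketch of constructing a config with these fields, assuming the transformers BridgeTowerConfig / BridgeTowerVisionConfig API and reusing values that appear in the diff above; it is illustrative, not the repository's own loading code.

from transformers import BridgeTowerConfig

# Illustrative sketch: the renamed keys are accepted as plain config fields.
# Values ("image_size": 288, both flags False) are taken from the diff above.
config = BridgeTowerConfig(
    init_layernorm_from_vision_encoder=False,
    vision_config={"image_size": 288, "remove_last_layer": False},
)
print(config.init_layernorm_from_vision_encoder)  # False
print(config.vision_config.remove_last_layer)     # False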