{
"act_layer": null,
"attn_drop_rate": 0.0,
"backbone": "hybrid",
"channel_dims": [
384,
384,
384,
384,
384,
384,
384,
384,
384,
384
],
"channels": 256,
"cls_attn_layers": 2,
"decoder_dropout": 0.1,
"decoder_hidden_size": 768,
"depths": [
3,
3
],
"dims": [
128,
256,
512,
1024
],
"drop_path_rate": 0.0,
"drop_rate": 0.0,
"eta": 1.0,
"feat_downsample": false,
"feature_strides": [
4,
8,
16,
32
],
"hidden_size": 384,
"hybrid_patch_size": 2,
"img_size": [
224,
224
],
"in_index": [
0,
1,
2,
3
],
"initializer_range": 1.0,
"mlp_ratio": 4.0,
"model_type": "fan",
"norm_layer": null,
"num_attention_heads": 8,
"num_channels": 3,
"num_hidden_layers": 10,
"out_index": 9,
"patch_size": 16,
"qkv_bias": true,
"reshape_last_stage": false,
"rounding_mode": "floor",
"se_mlp": false,
"segmentation_in_channels": [
128,
256,
384,
384
],
"semantic_loss_ignore_index": -100,
"sharpen_attn": false,
"tokens_norm": true,
"transformers_version": "4.24.0.dev0",
"use_checkpoint": false,
"use_head": false,
"use_pos_embed": true
}