{
"act_layer": null,
"attn_drop_rate": 0.0,
"backbone": null,
"c_head_num": null,
"channel_dims": null,
"channels": 256,
"cls_attn_layers": 2,
"decoder_hidden_size": 768,
"depth": 12,
"depths": null,
"drop_path_rate": 0.0,
"drop_rate": 0.0,
"dropout_ratio": 0.1,
"embed_dim": 192,
"eta": 1.0,
"feat_downsample": false,
"feature_strides": [
4,
8,
16,
32
],
"hybrid_patch_size": 2,
"img_size": [
224,
224
],
"in_channels": [
128,
256,
480,
480
],
"in_chans": 3,
"in_index": [
0,
1,
2,
3
],
"initializer_range": 1.0,
"mlp_ratio": 4.0,
"model_type": "fan",
"norm_layer": null,
"num_classes": 1000,
"num_heads": 4,
"out_index": -1,
"patch_size": 16,
"qkv_bias": true,
"reshape_last_stage": false,
"rounding_mode": "floor",
"se_mlp": false,
"semantic_loss_ignore_index": -100,
"sharpen_attn": false,
"sr_ratio": [
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1
],
"tokens_norm": true,
"transformers_version": "4.22.0.dev0",
"use_checkpoint": false,
"use_pos_embed": true
}