hiera_base_224 / config.json
{
  "architectures": [
    "HieraModel"
  ],
  "dim_mul": 2.0,
  "drop_path_rate": 0.0,
  "embedding_dimension": 96,
  "head_dropout": 0.0,
  "head_init_scale": 0.001,
  "head_mul": 2.0,
  "in_chans": 3,
  "input_size": [
    224,
    224
  ],
  "mask_unit_attn": [
    true,
    true,
    false,
    false
  ],
  "mask_unit_size": [
    8,
    8
  ],
  "mlp_ratio": 4.0,
  "model_type": "hiera",
  "num_classes": 1000,
  "number_of_heads": 1,
  "patch_kernel": [
    7,
    7
  ],
  "patch_padding": [
    3,
    3
  ],
  "patch_stride": [
    4,
    4
  ],
  "q_pool": 3,
  "q_stride": [
    2,
    2
  ],
  "sep_position_embeddings": false,
  "stages": [
    2,
    3,
    16,
    3
  ],
  "torch_dtype": "float32",
  "transformers_version": "4.38.0.dev0"
}
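
For reference, a minimal sketch of loading this config and its checkpoint through the transformers Auto classes. The config pins transformers_version 4.38.0.dev0, an in-development Hiera port, so this assumes a build in which model_type "hiera" is registered; field names here (e.g. embedding_dimension, number_of_heads) may differ from the HieraConfig that eventually shipped.

```python
# Minimal sketch, assuming a transformers build with the Hiera port
# registered under model_type "hiera" (this config was produced by
# 4.38.0.dev0, before the port was merged upstream).
from transformers import AutoConfig, AutoModel

repo = "namangarg110/hiera_base_224"  # the repo this config.json belongs to

config = AutoConfig.from_pretrained(repo)
model = AutoModel.from_pretrained(repo)

print(config.model_type)  # "hiera"
print(config.stages)      # [2, 3, 16, 3] -- block counts per stage
```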