ksmcg committed
Commit f36c8d1
1 Parent(s): b7406b4

Upload config

Files changed (1)
config.json +6 -11
config.json CHANGED
@@ -1,8 +1,4 @@
 {
-  "_name_or_path": "ksmcg/fan_tiny_8_p4_hybrid",
-  "architectures": [
-    "FANForImageClassification"
-  ],
   "attention_probs_dropout_prob": 0.0,
   "backbone": "hybrid",
   "channel_dims": null,
@@ -14,12 +10,6 @@
     3,
     3
   ],
-  "dims": [
-    128,
-    256,
-    512,
-    1024
-  ],
   "drop_path_rate": 0.0,
   "eta": 1.0,
   "feat_downsample": false,
@@ -32,6 +22,12 @@
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.0,
   "hidden_size": 192,
+  "hybrid_in_channels": [
+    128,
+    256,
+    512,
+    1024
+  ],
   "hybrid_patch_size": 2,
   "id2label": {
     "0": "tench, Tinca tinca",
@@ -1067,7 +1063,6 @@
   "semantic_loss_ignore_index": -100,
   "sharpen_attn": false,
   "tokens_norm": true,
-  "torch_dtype": "float32",
   "transformers_version": "4.25.0.dev0",
   "use_head": false,
   "use_pos_embed": true
 
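Not part of the commit itself, but for context: a minimal Python sketch of how the renamed field can be read back from the uploaded config.json. The repo id ksmcg/fan_tiny_8_p4_hybrid is assumed from the removed "_name_or_path" value, and huggingface_hub is assumed to be installed; the keys and values are those shown in the diff above.

import json
from huggingface_hub import hf_hub_download

# Fetch the config uploaded by this commit (repo id taken from the removed
# "_name_or_path" value; an assumption, not stated by the commit page).
path = hf_hub_download(repo_id="ksmcg/fan_tiny_8_p4_hybrid", filename="config.json")
with open(path) as f:
    config = json.load(f)

# After this commit the stage channel widths live under "hybrid_in_channels"
# rather than the old "dims" key.
print(config["hybrid_in_channels"])               # [128, 256, 512, 1024]
print(config["backbone"], config["hidden_size"])  # hybrid 192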