# NOTE: removed non-Python scrape artifacts (file-size header, commit hash,
# and line-number gutter) that preceded the module source.
from transformers import PretrainedConfig
class InternImageConfig(PretrainedConfig):
    """Configuration class for an InternImage model.

    Holds the architectural hyper-parameters of the DCNv3-based InternImage
    backbone (channel width, stage depths, group counts, drop rates, etc.).
    Any extra keyword arguments are forwarded to ``PretrainedConfig``.
    """

    model_type = "intern_image"

    def __init__(
        self,
        core_op='DCNv3_pytorch',
        channels=64,
        depths=(4, 4, 18, 4),
        groups=(4, 8, 16, 32),
        num_classes=1000,
        mlp_ratio=4.,
        drop_rate=0.,
        drop_path_rate=0.1,
        drop_path_type='linear',
        act_layer='GELU',
        norm_layer='LN',
        layer_scale=None,
        offset_scale=1.0,
        post_norm=False,
        cls_scale=1.5,
        with_cp=False,
        **kwargs,
    ):
        # Record every explicit hyper-parameter on the instance, preserving
        # the original assignment order. Using setattr keeps this equivalent
        # to plain attribute assignment (both route through __setattr__).
        hyper_params = (
            ("core_op", core_op),
            ("channels", channels),
            ("depths", depths),
            ("groups", groups),
            ("num_classes", num_classes),
            ("mlp_ratio", mlp_ratio),
            ("drop_rate", drop_rate),
            ("drop_path_rate", drop_path_rate),
            ("drop_path_type", drop_path_type),
            ("act_layer", act_layer),
            ("norm_layer", norm_layer),
            ("layer_scale", layer_scale),
            ("offset_scale", offset_scale),
            ("post_norm", post_norm),
            ("cls_scale", cls_scale),
            ("with_cp", with_cp),
        )
        for attr_name, attr_value in hyper_params:
            setattr(self, attr_name, attr_value)
        # Parent init runs last so remaining kwargs (e.g. id2label) are
        # applied by PretrainedConfig without clobbering the defaults above.
        super().__init__(**kwargs)