@@ -442,7 +442,7 @@ def drop_blocks(drop_block_rate=0.):
 
 def make_blocks(
         block_fn, channels, block_repeats, inplanes, reduce_first=1, output_stride=32,
-        down_kernel_size=1, avg_down=False, drop_block_rate=0., drop_path_rate=0., first_conv_stride=1, **kwargs):
+        down_kernel_size=1, avg_down=False, drop_block_rate=0., drop_path_rate=0., **kwargs):
     stages = []
     feature_info = []
     net_num_blocks = sum(block_repeats)
@@ -451,7 +451,7 @@ def make_blocks(
     dilation = prev_dilation = 1
     for stage_idx, (planes, num_blocks, db) in enumerate(zip(channels, block_repeats, drop_blocks(drop_block_rate))):
         stage_name = f'layer{stage_idx + 1}'  # never liked this name, but weight compat requires it
-        stride = first_conv_stride if stage_idx == 0 else 2
+        stride = 1 if stage_idx == 0 else 2
         if net_stride >= output_stride:
             dilation *= stride
             stride = 1
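
Note on the two hunks above: with `first_conv_stride` gone, `layer1` always starts at stride 1 and every variant keeps the same stage-stride schedule; the stem alone decides how the initial downsampling happens. A minimal standalone sketch of the stride/dilation bookkeeping in this loop (the `stage_strides` helper is hypothetical, for illustration only; the stem is assumed to have already downsampled by 4x):

# Hypothetical helper (not in the commit) mirroring the loop above:
# each stage after the first strides by 2 until the requested
# output_stride is reached, after which stride is traded for dilation.
def stage_strides(output_stride=32):
    net_stride, dilation = 4, 1  # stem already reduced by 4x
    schedule = []
    for stage_idx in range(4):
        stride = 1 if stage_idx == 0 else 2
        if net_stride >= output_stride:
            dilation *= stride
            stride = 1
        else:
            net_stride *= stride
        schedule.append((stride, dilation))
    return schedule

print(stage_strides(32))  # [(1, 1), (2, 1), (2, 1), (2, 1)]
print(stage_strides(8))   # [(1, 1), (2, 1), (1, 2), (1, 4)]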
@@ -558,12 +558,12 @@ def __init__(self, block, layers, num_classes=1000, in_chans=3,
                  cardinality=1, base_width=64, stem_width=64, stem_type='',
                  output_stride=32, block_reduce_first=1, down_kernel_size=1, avg_down=False,
                  act_layer=nn.ReLU, norm_layer=nn.BatchNorm2d, aa_layer=None, drop_rate=0.0, drop_path_rate=0.,
-                 drop_block_rate=0., global_pool='avg', zero_init_last_bn=True, block_args=None, skip_stem_max_pool=False):
+                 drop_block_rate=0., global_pool='avg', zero_init_last_bn=True, block_args=None, replace_stem_max_pool=False):
         block_args = block_args or dict()
         assert output_stride in (8, 16, 32)
         self.num_classes = num_classes
         self.drop_rate = drop_rate
-        self.skip_stem_max_pool = skip_stem_max_pool
+        self.replace_stem_max_pool = replace_stem_max_pool
         super(ResNet, self).__init__()
 
         # Stem
@@ -588,25 +588,27 @@ def __init__(self, block, layers, num_classes=1000, in_chans=3,
         self.feature_info = [dict(num_chs=inplanes, reduction=2, module='act1')]
 
         # Stem Pooling
-        if not self.skip_stem_max_pool:
-            first_conv_stride = 1
+        if not self.replace_stem_max_pool:
             if aa_layer is not None:
                 self.maxpool = nn.Sequential(*[
                     nn.MaxPool2d(kernel_size=3, stride=1, padding=1),
                     aa_layer(channels=inplanes, stride=2)])
             else:
                 self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
         else:
-            self.maxpool = nn.Identity()
-            first_conv_stride = 2
+            self.maxpool = nn.Sequential(*[
+                nn.Conv2d(inplanes, inplanes, 3, stride=2, padding=1),
+                nn.BatchNorm2d(inplanes),
+                nn.ReLU()
+            ])
 
         # Feature Blocks
         channels = [64, 128, 256, 512]
         stage_modules, stage_feature_info = make_blocks(
             block, channels, layers, inplanes, cardinality=cardinality, base_width=base_width,
             output_stride=output_stride, reduce_first=block_reduce_first, avg_down=avg_down,
             down_kernel_size=down_kernel_size, act_layer=act_layer, norm_layer=norm_layer, aa_layer=aa_layer,
-            drop_block_rate=drop_block_rate, drop_path_rate=drop_path_rate, first_conv_stride=first_conv_stride, **block_args)
+            drop_block_rate=drop_block_rate, drop_path_rate=drop_path_rate, **block_args)
         for stage in stage_modules:
             self.add_module(*stage)  # layer1, layer2, etc
         self.feature_info.extend(stage_feature_info)
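
This hunk carries the substantive change: rather than skipping the stem max pool and pushing a stride-2 into `layer1` (the old `skip_stem_max_pool` path), the 3x3 max pool is replaced with a learned stride-2 3x3 conv followed by BN and ReLU, in the ResNet-RS style. Both branches halve the stem output, so the stage schedule downstream is untouched. A minimal standalone sketch comparing the two branches (`inplanes=64` and the 224x224 input size are assumptions for illustration, not part of the commit):

import torch
import torch.nn as nn

inplanes = 64  # assumed stem output width
x = torch.randn(1, inplanes, 112, 112)  # stem output for a 224x224 image

# Default branch: fixed 3x3 max pool, stride 2.
max_pool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)

# replace_stem_max_pool=True branch: learned stride-2 conv + BN + ReLU.
conv_pool = nn.Sequential(
    nn.Conv2d(inplanes, inplanes, 3, stride=2, padding=1),
    nn.BatchNorm2d(inplanes),
    nn.ReLU())

# Both halve spatial resolution, so later stages see identical shapes.
assert max_pool(x).shape == conv_pool(x).shape == (1, 64, 56, 56)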
@@ -1078,39 +1080,39 @@ def ecaresnet50d(pretrained=False, **kwargs):
 @register_model
 def resnetrs50(pretrained=False, **kwargs):
     model_args = dict(
-        block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', skip_stem_max_pool=True,
+        block=Bottleneck, layers=[3, 4, 6, 3], stem_width=32, stem_type='deep', replace_stem_max_pool=True,
         avg_down=True, block_args=dict(attn_layer='se'), **kwargs)
     return _create_resnet('resnetrs50', pretrained, **model_args)
 
 
 @register_model
 def resnetrs101(pretrained=False, **kwargs):
     model_args = dict(
-        block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', skip_stem_max_pool=True,
+        block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', replace_stem_max_pool=True,
         avg_down=True, block_args=dict(attn_layer='se'), **kwargs)
     return _create_resnet('resnetrs101', pretrained, **model_args)
 
 
 @register_model
 def resnetrs152(pretrained=False, **kwargs):
     model_args = dict(
-        block=Bottleneck, layers=[3, 8, 36, 3], stem_width=32, stem_type='deep', skip_stem_max_pool=True,
+        block=Bottleneck, layers=[3, 8, 36, 3], stem_width=32, stem_type='deep', replace_stem_max_pool=True,
         avg_down=True, block_args=dict(attn_layer='se'), **kwargs)
     return _create_resnet('resnetrs152', pretrained, **model_args)
 
 
 @register_model
 def resnetrs200(pretrained=False, **kwargs):
     model_args = dict(
-        block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', skip_stem_max_pool=True,
+        block=Bottleneck, layers=[3, 24, 36, 3], stem_width=32, stem_type='deep', replace_stem_max_pool=True,
         avg_down=True, block_args=dict(attn_layer='se'), **kwargs)
     return _create_resnet('resnetrs200', pretrained, **model_args)
 
 
 @register_model
 def resnetrs270(pretrained=False, **kwargs):
     model_args = dict(
-        block=Bottleneck, layers=[4, 29, 53, 4], stem_width=32, stem_type='deep', skip_stem_max_pool=True,
+        block=Bottleneck, layers=[4, 29, 53, 4], stem_width=32, stem_type='deep', replace_stem_max_pool=True,
         avg_down=True, block_args=dict(attn_layer='se'), **kwargs)
     return _create_resnet('resnetrs270', pretrained, **model_args)
 
@@ -1119,15 +1121,15 @@ def resnetrs270(pretrained=False, **kwargs):
 @register_model
 def resnetrs350(pretrained=False, **kwargs):
     model_args = dict(
-        block=Bottleneck, layers=[4, 36, 72, 4], stem_width=32, stem_type='deep', skip_stem_max_pool=True,
+        block=Bottleneck, layers=[4, 36, 72, 4], stem_width=32, stem_type='deep', replace_stem_max_pool=True,
         avg_down=True, block_args=dict(attn_layer='se'), **kwargs)
     return _create_resnet('resnetrs350', pretrained, **model_args)
 
 
 @register_model
 def resnetrs420(pretrained=False, **kwargs):
     model_args = dict(
-        block=Bottleneck, layers=[4, 44, 87, 4], stem_width=32, stem_type='deep', skip_stem_max_pool=True,
+        block=Bottleneck, layers=[4, 44, 87, 4], stem_width=32, stem_type='deep', replace_stem_max_pool=True,
         avg_down=True, block_args=dict(attn_layer='se'), **kwargs)
     return _create_resnet('resnetrs420', pretrained, **model_args)
 
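
Once registered, the renamed flag is baked into each variant's config and the models go through the normal timm factory. A minimal usage sketch (pretrained weights may not be published for every depth, so `pretrained=False` is used here):

import torch
import timm

# replace_stem_max_pool=True comes from the registered config,
# so no extra arguments are needed at creation time.
model = timm.create_model('resnetrs50', pretrained=False)
model.eval()

with torch.no_grad():
    out = model(torch.randn(1, 3, 224, 224))
print(out.shape)  # torch.Size([1, 1000])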