@@ -21,7 +21,6 @@ def __init__(self, C, depth, num_classes, norm=nn.BatchNorm2d, momentum=0.0003,
        self._C = C
        self._depth = depth
        self._num_classes = num_classes
-       self._norm = norm

        self.global_pooling = nn.AdaptiveAvgPool2d(1)
        self.relu = nn.ReLU(inplace=True)
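The hunk above only deletes `self._norm = norm`: the factory is consumed while the layers are built inside `__init__` and is never used again, so keeping it on the module is dead weight. A minimal sketch of the same pattern, with an illustrative `MiniHead` module that is not part of this commit:

import torch
import torch.nn as nn

class MiniHead(nn.Module):
    # Illustrative only: the norm factory is called once during
    # construction and deliberately not stored as an attribute.
    def __init__(self, channels, norm=nn.BatchNorm2d):
        super().__init__()
        self.conv = nn.Conv2d(channels, channels, kernel_size=3, padding=1, bias=False)
        self.bn = norm(channels)  # instantiate here; no self._norm needed
        self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        return self.relu(self.bn(self.conv(x)))

head = MiniHead(64)
out = head(torch.randn(2, 64, 32, 32))  # shape stays (2, 64, 32, 32)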
@@ -36,14 +35,14 @@ def __init__(self, C, depth, num_classes, norm=nn.BatchNorm2d, momentum=0.0003,
                               dilation=int(18 * mult), padding=int(18 * mult),
                               bias=False)
        self.aspp5 = nn.Conv2d(C, depth, kernel_size=1, stride=1, bias=False)
-       self.aspp1_bn = self._norm(depth, momentum)
-       self.aspp2_bn = self._norm(depth, momentum)
-       self.aspp3_bn = self._norm(depth, momentum)
-       self.aspp4_bn = self._norm(depth, momentum)
-       self.aspp5_bn = self._norm(depth, momentum)
+       self.aspp1_bn = norm(depth, momentum)
+       self.aspp2_bn = norm(depth, momentum)
+       self.aspp3_bn = norm(depth, momentum)
+       self.aspp4_bn = norm(depth, momentum)
+       self.aspp5_bn = norm(depth, momentum)

        self.conv2 = nn.Conv2d(depth * 5, depth, kernel_size=1, stride=1,
                               bias=False)
-       self.bn2 = self._norm(depth, momentum)
+       self.bn2 = norm(depth, momentum)
        self.conv3 = nn.Conv2d(depth, num_classes, kernel_size=1, stride=1)

    def forward(self, x):
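With the stored copy gone, every normalization layer in ASPP is built directly from the `norm` argument, which must accept `(planes, momentum)` positionally, as the call sites above show. One caveat worth flagging: the second positional parameter of `nn.BatchNorm2d` is `eps`, not `momentum`, so with the default `norm=nn.BatchNorm2d` the call `norm(depth, momentum)` only does what the name suggests when a factory with a `(planes, momentum)` signature is passed in, as ResNet does below. A hypothetical GroupNorm factory satisfying that contract (the group count of 32 and the channel sizes are assumptions, not values from the commit):

import torch.nn as nn

# Hypothetical factory matching the norm(depth, momentum) call sites;
# GroupNorm keeps no running statistics, so momentum is simply ignored.
def gn_factory(planes, momentum=0.05):
    return nn.GroupNorm(32, planes)

aspp = ASPP(2048, 256, num_classes=21, norm=gn_factory)  # channel sizes illustrative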
@@ -117,7 +116,7 @@ class ResNet(nn.Module):

    def __init__(self, block, layers, num_classes, num_groups=None, beta=False):
        self.inplanes = 64
-       self._norm = lambda planes, momentum=0.05: nn.BatchNorm2d(planes, momentum=momentum) if num_groups is None else nn.GroupNorm(num_groups, planes)
+       self.norm = lambda planes, momentum=0.05: nn.BatchNorm2d(planes, momentum=momentum) if num_groups is None else nn.GroupNorm(num_groups, planes)

        super(ResNet, self).__init__()
        if not beta:
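The rename from `self._norm` to `self.norm` is cosmetic; the lambda itself carries the logic, choosing BatchNorm2d with the given momentum when `num_groups` is None and GroupNorm otherwise. Spelled out as a named function (an equivalent sketch, not code from the commit):

import torch.nn as nn

def make_norm(planes, momentum=0.05, num_groups=None):
    # Equivalent to the lambda: BatchNorm2d unless a group count is given.
    if num_groups is None:
        return nn.BatchNorm2d(planes, momentum=momentum)
    # GroupNorm has no running stats, so momentum does not apply to it.
    return nn.GroupNorm(num_groups, planes)

Note that because the stored value is a lambda rather than an nn.Module, it is a plain attribute, not a registered submodule, and a model holding it cannot be pickled whole with torch.save(model); saving model.state_dict() is unaffected.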
@@ -128,15 +127,15 @@ def __init__(self, block, layers, num_classes, num_groups=None, beta=False):
                nn.Conv2d(3, 64, 3, stride=2, padding=1, bias=False),
                nn.Conv2d(64, 64, 3, stride=1, padding=1, bias=False),
                nn.Conv2d(64, 64, 3, stride=1, padding=1, bias=False))
-       self.bn1 = self._norm(64)
+       self.bn1 = self.norm(64)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
        self.layer4 = self._make_layer(block, 512, layers[3], stride=1,
                                       dilation=2)
-       self.aspp = ASPP(512 * block.expansion, 256, num_classes, self._norm)
+       self.aspp = ASPP(512 * block.expansion, 256, num_classes, self.norm)

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
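The hunk is cut off inside the weight-initialization loop, so its body is not visible in this diff. Loops of this shape conventionally apply Kaiming initialization to convolutions and constant initialization to norm layers; the sketch below shows that convention and is an assumption about what follows, not the repo's actual code:

import torch.nn as nn

def init_weights(model):
    # Assumed body for the truncated loop above (Kaiming init is the
    # usual choice for ReLU networks; the actual code may differ).
    for m in model.modules():
        if isinstance(m, nn.Conv2d):
            nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
        elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
            nn.init.constant_(m.weight, 1)
            nn.init.constant_(m.bias, 0)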
@@ -152,14 +151,14 @@ def _make_layer(self, block, planes, blocks, stride=1, dilation=1):
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, dilation=max(1, dilation / 2), bias=False),
-               self._norm(planes * block.expansion),
+               self.norm(planes * block.expansion),
            )

        layers = []
-       layers.append(block(self.inplanes, planes, stride, downsample, dilation=max(1, dilation / 2), norm=self._norm))
+       layers.append(block(self.inplanes, planes, stride, downsample, dilation=max(1, dilation / 2), norm=self.norm))
        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
-           layers.append(block(self.inplanes, planes, dilation=dilation, norm=self._norm))
+           layers.append(block(self.inplanes, planes, dilation=dilation, norm=self.norm))

        return nn.Sequential(*layers)
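As in the usual ResNet recipe, only the first block of a stage receives the stride and the downsample shortcut; the remaining blocks - 1 run at stride 1 with the full dilation. Note also that dilation / 2 is float division in Python 3: with the dilation=2 used for layer4 it still resolves to the integer 1 through max(1, 1.0), but larger dilations would hand nn.Conv2d a float, so dilation // 2 may be the intended spelling. A usage sketch, assuming Bottleneck is the block class defined elsewhere in this file and using the standard ResNet-101 layer counts:

# ResNet-101 with BatchNorm (default) or GroupNorm (num_groups set).
model_bn = ResNet(Bottleneck, [3, 4, 23, 3], num_classes=21)
model_gn = ResNet(Bottleneck, [3, 4, 23, 3], num_classes=21, num_groups=32)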