Commit c1010b7

improve norm coding style
1 parent cf27700 commit c1010b7

1 file changed: +12 -13 lines

deeplab.py

Lines changed: 12 additions & 13 deletions
@@ -21,7 +21,6 @@ def __init__(self, C, depth, num_classes, norm=nn.BatchNorm2d, momentum=0.0003,
         self._C = C
         self._depth = depth
         self._num_classes = num_classes
-        self._norm = norm

         self.global_pooling = nn.AdaptiveAvgPool2d(1)
         self.relu = nn.ReLU(inplace=True)
@@ -36,14 +35,14 @@ def __init__(self, C, depth, num_classes, norm=nn.BatchNorm2d, momentum=0.0003,
                                dilation=int(18*mult), padding=int(18*mult),
                                bias=False)
         self.aspp5 = nn.Conv2d(C, depth, kernel_size=1, stride=1, bias=False)
-        self.aspp1_bn = self._norm(depth, momentum)
-        self.aspp2_bn = self._norm(depth, momentum)
-        self.aspp3_bn = self._norm(depth, momentum)
-        self.aspp4_bn = self._norm(depth, momentum)
-        self.aspp5_bn = self._norm(depth, momentum)
+        self.aspp1_bn = norm(depth, momentum)
+        self.aspp2_bn = norm(depth, momentum)
+        self.aspp3_bn = norm(depth, momentum)
+        self.aspp4_bn = norm(depth, momentum)
+        self.aspp5_bn = norm(depth, momentum)
         self.conv2 = nn.Conv2d(depth * 5, depth, kernel_size=1, stride=1,
                                bias=False)
-        self.bn2 = self._norm(depth, momentum)
+        self.bn2 = norm(depth, momentum)
         self.conv3 = nn.Conv2d(depth, num_classes, kernel_size=1, stride=1)

     def forward(self, x):
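Note: with this change ASPP consumes its norm argument directly instead of keeping a self._norm copy. A minimal sketch of supplying a norm callable, assuming deeplab.py is importable and the remaining ASPP parameters keep their defaults (the factory name and the channel/class counts below are illustrative, not from this diff):

import torch.nn as nn
from deeplab import ASPP

# any callable of the form f(planes, momentum) -> nn.Module works as `norm`;
# this mirrors the lambda that ResNet.__init__ builds later in this diff
def batch_norm_factory(planes, momentum=0.05):
    return nn.BatchNorm2d(planes, momentum=momentum)

aspp = ASPP(2048, 256, 21, norm=batch_norm_factory)  # illustrative sizes: C=2048, depth=256, 21 classes
print(type(aspp.aspp1_bn))  # BatchNorm2d, produced by norm(depth, momentum) inside ASPP.__init__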
@@ -117,7 +116,7 @@ class ResNet(nn.Module):

     def __init__(self, block, layers, num_classes, num_groups=None, beta=False):
         self.inplanes = 64
-        self._norm = lambda planes, momentum=0.05: nn.BatchNorm2d(planes, momentum=momentum) if num_groups is None else nn.GroupNorm(num_groups, planes)
+        self.norm = lambda planes, momentum=0.05: nn.BatchNorm2d(planes, momentum=momentum) if num_groups is None else nn.GroupNorm(num_groups, planes)

         super(ResNet, self).__init__()
         if not beta:
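Note: the renamed self.norm factory behaves exactly as before, returning BatchNorm2d when num_groups is None and GroupNorm otherwise. A standalone sketch of that behaviour (the wrapper function is made up for illustration; only the lambda body comes from the line above):

import torch.nn as nn

def make_norm(num_groups=None):
    # same expression as the lambda in ResNet.__init__; momentum is only
    # used on the BatchNorm2d branch, GroupNorm has no such parameter
    return lambda planes, momentum=0.05: (
        nn.BatchNorm2d(planes, momentum=momentum)
        if num_groups is None
        else nn.GroupNorm(num_groups, planes))

print(make_norm()(256))    # BatchNorm2d(256, eps=1e-05, momentum=0.05, ...)
print(make_norm(32)(256))  # GroupNorm(32, 256, eps=1e-05, affine=True)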
@@ -128,15 +127,15 @@ def __init__(self, block, layers, num_classes, num_groups=None, beta=False):
                 nn.Conv2d(3, 64, 3, stride=2, padding=1, bias=False),
                 nn.Conv2d(64, 64, 3, stride=1, padding=1, bias=False),
                 nn.Conv2d(64, 64, 3, stride=1, padding=1, bias=False))
-        self.bn1 = self._norm(64)
+        self.bn1 = self.norm(64)
         self.relu = nn.ReLU(inplace=True)
         self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
         self.layer1 = self._make_layer(block, 64, layers[0])
         self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
         self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
         self.layer4 = self._make_layer(block, 512, layers[3], stride=1,
                                        dilation=2)
-        self.aspp = ASPP(512 * block.expansion, 256, num_classes, self._norm)
+        self.aspp = ASPP(512 * block.expansion, 256, num_classes, self.norm)

         for m in self.modules():
             if isinstance(m, nn.Conv2d):
@@ -152,14 +151,14 @@ def _make_layer(self, block, planes, blocks, stride=1, dilation=1):
             downsample = nn.Sequential(
                 nn.Conv2d(self.inplanes, planes * block.expansion,
                           kernel_size=1, stride=stride, dilation=max(1, dilation/2), bias=False),
-                self._norm(planes * block.expansion),
+                self.norm(planes * block.expansion),
             )

         layers = []
-        layers.append(block(self.inplanes, planes, stride, downsample, dilation=max(1, dilation/2), norm=self._norm))
+        layers.append(block(self.inplanes, planes, stride, downsample, dilation=max(1, dilation/2), norm=self.norm))
         self.inplanes = planes * block.expansion
         for i in range(1, blocks):
-            layers.append(block(self.inplanes, planes, dilation=dilation, norm=self._norm))
+            layers.append(block(self.inplanes, planes, dilation=dilation, norm=self.norm))

         return nn.Sequential(*layers)
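Note: _make_layer threads the same factory into the downsample branch and into each residual block via the norm keyword. A small sketch of the downsample construction with the renamed factory plugged in (num_groups and the channel sizes are illustrative, not from this diff):

import torch.nn as nn

num_groups = 32
norm = lambda planes, momentum=0.05: (
    nn.BatchNorm2d(planes, momentum=momentum)
    if num_groups is None
    else nn.GroupNorm(num_groups, planes))

inplanes, planes, expansion, stride = 256, 128, 4, 2
downsample = nn.Sequential(
    nn.Conv2d(inplanes, planes * expansion, kernel_size=1, stride=stride, bias=False),
    norm(planes * expansion))  # self.norm(...) in the diff; GroupNorm(32, 512) here
print(downsample)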
