Add init weights for hrnet_contrast (PaddlePaddle#1746)
juncaipeng committed Mar 14, 2022
1 parent cd8d52d commit 88c940d
Showing 1 changed file with 25 additions and 21 deletions.
46 changes: 25 additions & 21 deletions paddleseg/models/hrnet_contrast.py
@@ -40,6 +40,7 @@ class HRNetW48Contrast(nn.Layer):
             e.g. 1024x512, otherwise it is True, e.g. 769x769. Default: False.
         pretrained (str, optional): The path or url of pretrained model. Default: None.
     """
+
     def __init__(self,
                  in_channels,
                  num_classes,
@@ -54,23 +55,23 @@ def __init__(self,
         self.num_classes = num_classes
         self.proj_dim = proj_dim
         self.align_corners = align_corners
-        self.pretrained = pretrained

         self.cls_head = nn.Sequential(
-            layers.ConvBNReLU(in_channels,
-                              in_channels,
-                              kernel_size=3,
-                              stride=1,
-                              padding=1),
+            layers.ConvBNReLU(
+                in_channels, in_channels, kernel_size=3, stride=1, padding=1),
             nn.Dropout2D(drop_prob),
-            nn.Conv2D(in_channels,
-                      num_classes,
-                      kernel_size=1,
-                      stride=1,
-                      bias_attr=False),
+            nn.Conv2D(
+                in_channels,
+                num_classes,
+                kernel_size=1,
+                stride=1,
+                bias_attr=False),
         )
-        self.proj_head = ProjectionHead(dim_in=in_channels,
-                                        proj_dim=self.proj_dim)
+        self.proj_head = ProjectionHead(
+            dim_in=in_channels, proj_dim=self.proj_dim)
+
+        self.pretrained = pretrained
+        self.init_weight()

     def init_weight(self):
         if self.pretrained is not None:
@@ -83,17 +84,19 @@ def forward(self, x):
         if self.training:
             emb = self.proj_head(feats)
             logit_list.append(
-                F.interpolate(out,
-                              paddle.shape(x)[2:],
-                              mode='bilinear',
-                              align_corners=self.align_corners))
+                F.interpolate(
+                    out,
+                    paddle.shape(x)[2:],
+                    mode='bilinear',
+                    align_corners=self.align_corners))
             logit_list.append({'seg': out, 'embed': emb})
         else:
             logit_list.append(
-                F.interpolate(out,
-                              paddle.shape(x)[2:],
-                              mode='bilinear',
-                              align_corners=self.align_corners))
+                F.interpolate(
+                    out,
+                    paddle.shape(x)[2:],
+                    mode='bilinear',
+                    align_corners=self.align_corners))
         return logit_list


@@ -105,6 +108,7 @@ class ProjectionHead(nn.Layer):
         proj_dim (int, optional): The output dimensions of projection head. Default: 256.
         proj (str, optional): The type of projection head, only support 'linear' and 'convmlp'. Default: 'convmlp'.
     """
+
     def __init__(self, dim_in, proj_dim=256, proj='convmlp'):
         super(ProjectionHead, self).__init__()
         if proj == 'linear':
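
The diff is truncated at this point. For readability, here is a sketch of how ProjectionHead plausibly continues, based only on the 'linear'/'convmlp' options named in its docstring; the exact layer choices and the L2 normalization in forward are assumptions, not part of this commit.

import paddle.nn as nn
import paddle.nn.functional as F

from paddleseg.models import layers


class ProjectionHead(nn.Layer):
    """Sketch of the truncated class; layer choices are assumed."""

    def __init__(self, dim_in, proj_dim=256, proj='convmlp'):
        super(ProjectionHead, self).__init__()
        if proj == 'linear':
            # 'linear': a single 1x1 convolution maps features to proj_dim.
            self.proj = nn.Conv2D(dim_in, proj_dim, kernel_size=1)
        elif proj == 'convmlp':
            # 'convmlp': a small conv MLP, ConvBNReLU then a 1x1 projection.
            self.proj = nn.Sequential(
                layers.ConvBNReLU(dim_in, dim_in, kernel_size=1),
                nn.Conv2D(dim_in, proj_dim, kernel_size=1))
        else:
            raise ValueError(
                "proj only supports 'linear' and 'convmlp', got {}".format(proj))

    def forward(self, x):
        # L2-normalize the embedding along the channel axis (assumed).
        return F.normalize(self.proj(x), p=2, axis=1)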
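For context on what the commit changes: self.init_weight() is now called at the end of __init__, so a model built with a pretrained path loads its weights at construction time rather than needing a separate call. A minimal usage sketch follows; the backbone class, the 720-channel width, and any constructor arguments not visible in the diff are assumptions.

import paddle
from paddleseg.models import HRNetW48Contrast
from paddleseg.models.backbones import HRNet_W48

# 720 is the assumed concatenated HRNet-W48 output width (48+96+192+384).
# Set `pretrained` to a .pdparams path or URL to have init_weight() load
# the weights during construction.
model = HRNetW48Contrast(
    in_channels=720,
    num_classes=19,
    backbone=HRNet_W48(),
    drop_prob=0.1,
    proj_dim=256,
    pretrained=None)

model.eval()
x = paddle.randn([1, 3, 512, 1024])
logit_list = model(x)  # eval mode: one upsampled logit tensor in a list
print(logit_list[0].shape)  # [1, 19, 512, 1024]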
