
Commit

Fix typo activatation -> activation (#2007)
whikwon authored and hellock committed Jan 22, 2020
1 parent b5d62ef commit 78529ec
Showing 1 changed file with 3 additions and 3 deletions: mmdet/models/utils/conv_module.py
@@ -93,7 +93,7 @@ def __init__(self,
         assert set(order) == set(['conv', 'norm', 'act'])
 
         self.with_norm = norm_cfg is not None
-        self.with_activatation = activation is not None
+        self.with_activation = activation is not None
         # if the conv layer is before a norm layer, bias is unnecessary.
         if bias == 'auto':
             bias = False if self.with_norm else True
@@ -135,7 +135,7 @@ def __init__(self,
             self.add_module(self.norm_name, norm)
 
         # build activation layer
-        if self.with_activatation:
+        if self.with_activation:
             # TODO: introduce `act_cfg` and supports more activation layers
             if self.activation not in ['relu']:
                 raise ValueError('{} is currently not supported.'.format(
@@ -162,6 +162,6 @@ def forward(self, x, activate=True, norm=True):
                 x = self.conv(x)
             elif layer == 'norm' and norm and self.with_norm:
                 x = self.norm(x)
-            elif layer == 'act' and activate and self.with_activatation:
+            elif layer == 'act' and activate and self.with_activation:
                 x = self.activate(x)
         return x
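
For context, the rename only fixes the attribute's spelling; behaviour is unchanged. Below is a minimal usage sketch of the module this commit touches, assuming the mmdet v1.x API visible in the diff (ConvModule importable from mmdet.models.utils, a string `activation` argument rather than an `act_cfg` dict, and the default order of ('conv', 'norm', 'act')); the channel counts and input shape are illustrative, not taken from the repository.

    import torch
    from mmdet.models.utils import ConvModule

    # conv -> BN -> ReLU block; only 'relu' is accepted as `activation` here,
    # per the TODO in the diff above.
    conv_block = ConvModule(
        in_channels=3,
        out_channels=16,
        kernel_size=3,
        padding=1,
        norm_cfg=dict(type='BN'),
        activation='relu')

    x = torch.randn(1, 3, 32, 32)
    y = conv_block(x)                         # conv, norm, act applied in `order`
    y_linear = conv_block(x, activate=False)  # forward() skips the 'act' branch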
