diff --git a/utils/data.py b/data/labelled.py
similarity index 100%
rename from utils/data.py
rename to data/labelled.py
diff --git a/networks/layers.py b/networks/layers.py
index e149519..41ba54f 100644
--- a/networks/layers.py
+++ b/networks/layers.py
@@ -1,7 +1,7 @@
 from torch import nn
 import torch
 from torch.utils.data import DataLoader
-from utils.data import SimpleDataset
+from data.labelled import SimpleDataset
 from torch.nn import functional as F
diff --git a/networks/utils.py b/networks/utils.py
index d2037d1..e64e462 100644
--- a/networks/utils.py
+++ b/networks/utils.py
@@ -19,9 +19,9 @@ def initialize_modules(model, nonlinearity='leaky_relu', init_type='kaiming'):
                 nn.init.xavier_uniform_(m.weight)
             else:
                 print('unrecognized init type, using default PyTorch initialization scheme...')
-        elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm, nn.Linear)):
+        elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm, nn.Linear, nn.LayerNorm, nn.Embedding)):
             nn.init.normal_(m.weight, 0.0, 0.02)
-            if m.bias is not None:
+            if hasattr(m, 'bias') and m.bias is not None:
                 nn.init.constant_(m.bias, 0)
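A minimal usage sketch of the updated initialize_modules (the toy modules below are illustrative and not part of this change; only initialize_modules and its parameters come from networks/utils.py). It shows why the diff matters: nn.LayerNorm and nn.Embedding now fall into the normal-init branch, and the hasattr guard is needed because nn.Embedding defines no bias attribute.

# Hypothetical usage sketch; the toy modules are assumptions for illustration.
from torch import nn

from networks.utils import initialize_modules

toy = nn.Sequential(
    nn.Conv2d(3, 16, kernel_size=3, padding=1),  # handled by the earlier weight-init branch (not shown in this hunk)
    nn.GroupNorm(4, 16),                         # already covered: weight ~ N(0, 0.02), bias = 0
    nn.LayerNorm(16),                            # newly covered by the extended elif
)
embedding = nn.Embedding(100, 32)                # newly covered; has no .bias attribute

initialize_modules(toy, nonlinearity='leaky_relu', init_type='kaiming')
initialize_modules(embedding)  # would raise AttributeError here without the hasattr guard

print(embedding.weight.std())  # roughly 0.02 after the normal_ init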