From bf2f1af088558e5a150d858294283945515585db Mon Sep 17 00:00:00 2001 From: Richard Zhang Date: Wed, 21 Oct 2020 17:26:30 -0700 Subject: [PATCH] s3 links --- antialiased_cnns/alexnet.py | 8 +++- antialiased_cnns/densenet.py | 35 +++++++++++------ antialiased_cnns/mobilenet.py | 11 +++--- antialiased_cnns/resnet.py | 74 ++++++++++++++++++++++++----------- antialiased_cnns/vgg.py | 73 ++++++++++++++++++++++------------ 5 files changed, 137 insertions(+), 64 deletions(-) diff --git a/antialiased_cnns/alexnet.py b/antialiased_cnns/alexnet.py index 267bad3..114e3bf 100644 --- a/antialiased_cnns/alexnet.py +++ b/antialiased_cnns/alexnet.py @@ -51,6 +51,7 @@ 'alexnet_lpf3': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/alexnet_lpf3-f9bbc410.pth', 'alexnet_lpf4': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/alexnet_lpf4-0114fe25.pth', 'alexnet_lpf5': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/alexnet_lpf5-4fa3706a.pth', + 'alexnet_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/alexnet_lpf4_finetune-20598a7a.pth', } @@ -106,7 +107,7 @@ def forward(self, x): return x -def alexnet(pretrained=False, filter_size=4, **kwargs): +def alexnet(pretrained=False, filter_size=4, _force_nonfinetuned=False, **kwargs): """AlexNet model architecture from the `"One weird trick..." `_ paper. 
@@ -116,7 +117,10 @@ def alexnet(pretrained=False, filter_size=4, **kwargs): """ model = AlexNet(filter_size=filter_size, **kwargs) if pretrained: - model.load_state_dict(model_zoo.load_url(model_urls['alexnet_lpf%i'%filter_size], map_location='cpu', check_hash=True)['state_dict']) + if(filter_size==4 and not _force_nonfinetuned): + model.load_state_dict(model_zoo.load_url(model_urls['alexnet_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + model.load_state_dict(model_zoo.load_url(model_urls['alexnet_lpf%i'%filter_size], map_location='cpu', check_hash=True)['state_dict']) return model diff --git a/antialiased_cnns/densenet.py b/antialiased_cnns/densenet.py index ee3968a..3f0aeca 100644 --- a/antialiased_cnns/densenet.py +++ b/antialiased_cnns/densenet.py @@ -53,6 +53,10 @@ 'densenet121_lpf3': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/densenet121_lpf3-0f267ad8.pth', 'densenet121_lpf4': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/densenet121_lpf4-edeaab00.pth', 'densenet121_lpf5': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/densenet121_lpf5-ebc7880c.pth', + 'densenet121_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/densenet121_lpf4_finetune-eceaa619.pth', + 'densenet161_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/densenet161_lpf4_finetune-a5e0f328.pth', + 'densenet169_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/densenet169_lpf4_finetune-992131e6.pth', + 'densenet201_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/densenet201_lpf4_finetune-736979b2.pth', } @@ -191,7 +195,7 @@ def _load_state_dict(model, model_url): model.load_state_dict(state_dict) -def densenet121(pretrained=False, filter_size=4, pool_only=True, **kwargs): +def densenet121(pretrained=False, filter_size=4, pool_only=True, _force_nonfinetuned=False, **kwargs): 
r"""Densenet-121 model from `"Densely Connected Convolutional Networks" `_ Args: @@ -202,11 +206,14 @@ def densenet121(pretrained=False, filter_size=4, pool_only=True, **kwargs): model = DenseNet(num_init_features=64, growth_rate=32, block_config=(6, 12, 24, 16), filter_size=filter_size, pool_only=pool_only, **kwargs) if pretrained: - _load_state_dict(model, model_urls['densenet121_lpf%i'%filter_size]) + if(filter_size==4 and not _force_nonfinetuned): + _load_state_dict(model, model_urls['densenet121_lpf4_finetune']) + else: + _load_state_dict(model, model_urls['densenet121_lpf%i'%filter_size]) return model -def densenet169(pretrained=False, filter_size=4, pool_only=True, **kwargs): +def densenet169(pretrained=False, filter_size=4, pool_only=True, _force_nonfinetuned=False, **kwargs): r"""Densenet-169 model from `"Densely Connected Convolutional Networks" `_ Args: @@ -216,12 +223,14 @@ def densenet169(pretrained=False, filter_size=4, pool_only=True, **kwargs): model = DenseNet(num_init_features=64, growth_rate=32, block_config=(6, 12, 32, 32), filter_size=filter_size, pool_only=pool_only, **kwargs) if pretrained: - raise ValueError('No pretrained model available') - # _load_state_dict(model, model_urls['densenet169']) + if(filter_size==4): + _load_state_dict(model, model_urls['densenet169_lpf4_finetune']) + else: + raise ValueError('No pretrained model available') return model -def densenet201(pretrained=False, filter_size=4, pool_only=True, **kwargs): +def densenet201(pretrained=False, filter_size=4, pool_only=True, _force_nonfinetuned=False, **kwargs): r"""Densenet-201 model from `"Densely Connected Convolutional Networks" `_ Args: @@ -231,12 +240,14 @@ def densenet201(pretrained=False, filter_size=4, pool_only=True, **kwargs): model = DenseNet(num_init_features=64, growth_rate=32, block_config=(6, 12, 48, 32), filter_size=filter_size, pool_only=pool_only, **kwargs) if pretrained: - raise ValueError('No pretrained model available') - # _load_state_dict(model, 
model_urls['densenet201']) + if(filter_size==4): + _load_state_dict(model, model_urls['densenet201_lpf4_finetune']) + else: + raise ValueError('No pretrained model available') return model -def densenet161(pretrained=False, filter_size=4, pool_only=True, **kwargs): +def densenet161(pretrained=False, filter_size=4, pool_only=True, _force_nonfinetuned=False, **kwargs): r"""Densenet-161 model from `"Densely Connected Convolutional Networks" `_ Args: @@ -246,6 +257,8 @@ def densenet161(pretrained=False, filter_size=4, pool_only=True, **kwargs): model = DenseNet(num_init_features=96, growth_rate=48, block_config=(6, 12, 36, 24), filter_size=filter_size, pool_only=pool_only, **kwargs) if pretrained: - raise ValueError('No pretrained model available') - # _load_state_dict(model, model_urls['densenet161']) + if(filter_size==4): + _load_state_dict(model, model_urls['densenet161_lpf4_finetune']) + else: + raise ValueError('No pretrained model available') return model \ No newline at end of file diff --git a/antialiased_cnns/mobilenet.py b/antialiased_cnns/mobilenet.py index d9e2166..9c2b1ec 100644 --- a/antialiased_cnns/mobilenet.py +++ b/antialiased_cnns/mobilenet.py @@ -49,6 +49,7 @@ 'mobilenet_v2_lpf3': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/mobilenet_v2_lpf3-23b2e2ee.pth', 'mobilenet_v2_lpf4': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/mobilenet_v2_lpf4-2e0b9cb9.pth', 'mobilenet_v2_lpf5': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/mobilenet_v2_lpf5-ab8fe968.pth', + 'mobilenet_v2_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/mobilenet_v2_lpf4_finetune-7eed94b1.pth', } @@ -160,7 +161,7 @@ def forward(self, x): return x -def mobilenet_v2(pretrained=False, filter_size=4, **kwargs): +def mobilenet_v2(pretrained=False, filter_size=4, _force_nonfinetuned=False, **kwargs): """ Constructs a MobileNetV2 architecture from `"MobileNetV2: Inverted Residuals and Linear 
Bottlenecks" `_. @@ -170,9 +171,9 @@ def mobilenet_v2(pretrained=False, filter_size=4, **kwargs): """ model = MobileNetV2(filter_size=filter_size, **kwargs) if pretrained: - model.load_state_dict(model_zoo.load_url(model_urls['mobilenet_v2_lpf%i'%filter_size], map_location='cpu', check_hash=True)['state_dict']) - # state_dict = load_state_dict_from_url(model_urls['mobilenet_v2'], - # progress=progress) - # model.load_state_dict(state_dict) + if(filter_size==4 and not _force_nonfinetuned): + model.load_state_dict(model_zoo.load_url(model_urls['mobilenet_v2_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + model.load_state_dict(model_zoo.load_url(model_urls['mobilenet_v2_lpf%i'%filter_size], map_location='cpu', check_hash=True)['state_dict']) return model \ No newline at end of file diff --git a/antialiased_cnns/resnet.py b/antialiased_cnns/resnet.py index 564e50d..d8c52ee 100644 --- a/antialiased_cnns/resnet.py +++ b/antialiased_cnns/resnet.py @@ -64,9 +64,16 @@ 'resnet101_lpf4': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/resnet101_lpf4-f8a116ff.pth', 'resnet101_lpf5': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/resnet101_lpf5-1f3745af.pth', 'resnet18_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/resnet18_lpf4_finetune-8cc58f59.pth', + 'resnet34_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/resnet34_lpf4_finetune-db622952.pth', + 'resnet50_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/resnet50_lpf4_finetune-cad66808.pth', + 'resnet101_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/resnet101_lpf4_finetune-9280acb0.pth', + 'resnet152_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/resnet152_lpf4_finetune-7f67d9ae.pth', + 'resnext50_32x4d_lpf4_finetune': 
'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/resnext50_32x4d_lpf4_finetune-9106e549.pth', + 'resnext101_32x8d_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/resnext101_32x8d_lpf4_finetune-8f13a25d.pth', + 'wide_resnet50_2_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/wide_resnet50_2_lpf4_finetune-02a183f7.pth', + 'wide_resnet101_2_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/wide_resnet101_2_lpf4_finetune-da4eae04.pth', } - def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1): """3x3 convolution with padding""" return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, @@ -293,75 +300,94 @@ def resnet18(pretrained=False, filter_size=4, pool_only=True, _force_nonfinetune return model -def resnet34(pretrained=False, filter_size=4, pool_only=True, **kwargs): +def resnet34(pretrained=False, filter_size=4, pool_only=True, _force_nonfinetuned=False, **kwargs): """Constructs a ResNet-34 model. 
Args: pretrained (bool): If True, returns a model pre-trained on ImageNet filter_size (int): Antialiasing filter size pool_only (bool): [True] don't antialias the first downsampling operation (which is costly to antialias) + _force_nonfinetuned (bool): [False] If True, load the trained-from scratch pretrained model (if available) """ model = ResNet(BasicBlock, [3, 4, 6, 3], filter_size=filter_size, pool_only=pool_only, **kwargs) if pretrained: - model.load_state_dict(model_zoo.load_url(model_urls['resnet34_lpf%i'%filter_size], map_location='cpu', check_hash=True)['state_dict']) + if(filter_size==4 and not _force_nonfinetuned): + model.load_state_dict(model_zoo.load_url(model_urls['resnet34_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + model.load_state_dict(model_zoo.load_url(model_urls['resnet34_lpf%i'%filter_size], map_location='cpu', check_hash=True)['state_dict']) return model -def resnet50(pretrained=False, filter_size=4, pool_only=True, **kwargs): +def resnet50(pretrained=False, filter_size=4, pool_only=True, _force_nonfinetuned=False, **kwargs): """Constructs a ResNet-50 model. 
Args: pretrained (bool): If True, returns a model pre-trained on ImageNet filter_size (int): Antialiasing filter size pool_only (bool): [True] don't antialias the first downsampling operation (which is costly to antialias) + _force_nonfinetuned (bool): [False] If True, load the trained-from scratch pretrained model (if available) """ model = ResNet(Bottleneck, [3, 4, 6, 3], filter_size=filter_size, pool_only=pool_only, **kwargs) if pretrained: - model.load_state_dict(model_zoo.load_url(model_urls['resnet50_lpf%i'%filter_size], map_location='cpu', check_hash=True)['state_dict']) + if(filter_size==4 and not _force_nonfinetuned): + model.load_state_dict(model_zoo.load_url(model_urls['resnet50_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + model.load_state_dict(model_zoo.load_url(model_urls['resnet50_lpf%i'%filter_size], map_location='cpu', check_hash=True)['state_dict']) return model -def resnet101(pretrained=False, filter_size=4, pool_only=True, **kwargs): +def resnet101(pretrained=False, filter_size=4, pool_only=True, _force_nonfinetuned=False, **kwargs): """Constructs a ResNet-101 model. 
Args: pretrained (bool): If True, returns a model pre-trained on ImageNet filter_size (int): Antialiasing filter size pool_only (bool): [True] don't antialias the first downsampling operation (which is costly to antialias) + _force_nonfinetuned (bool): [False] If True, load the trained-from scratch pretrained model (if available) """ model = ResNet(Bottleneck, [3, 4, 23, 3], filter_size=filter_size, pool_only=pool_only, **kwargs) if pretrained: - model.load_state_dict(model_zoo.load_url(model_urls['resnet101_lpf%i'%filter_size], map_location='cpu', check_hash=True)['state_dict']) + if(filter_size==4 and not _force_nonfinetuned): + model.load_state_dict(model_zoo.load_url(model_urls['resnet101_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + model.load_state_dict(model_zoo.load_url(model_urls['resnet101_lpf%i'%filter_size], map_location='cpu', check_hash=True)['state_dict']) return model -def resnet152(pretrained=False, filter_size=4, pool_only=True, **kwargs): +def resnet152(pretrained=False, filter_size=4, pool_only=True, _force_nonfinetuned=False, **kwargs): """Constructs a ResNet-152 model. 
Args: filter_size (int): Antialiasing filter size pool_only (bool): [True] don't antialias the first downsampling operation (which is costly to antialias) + _force_nonfinetuned (bool): [False] If True, load the trained-from scratch pretrained model (if available) """ model = ResNet(Bottleneck, [3, 8, 36, 3], filter_size=filter_size, pool_only=pool_only, **kwargs) if pretrained: - raise ValueError('No pretrained model available') - # model.load_state_dict(model_zoo.load_url(model_urls['resnet152'])) + if(filter_size==4): + model.load_state_dict(model_zoo.load_url(model_urls['resnet152_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + raise ValueError('No pretrained model available') return model -def resnext50_32x4d(pretrained=False, filter_size=4, pool_only=True, **kwargs): +def resnext50_32x4d(pretrained=False, filter_size=4, pool_only=True, _force_nonfinetuned=False, **kwargs): model = ResNet(Bottleneck, [3, 4, 6, 3], groups=32, width_per_group=4, filter_size=filter_size, pool_only=pool_only, **kwargs) if pretrained: - raise ValueError('No pretrained model available') - # model.load_state_dict(model_zoo.load_url(model_urls['resnext50_32x4d'])) + if(filter_size==4): + model.load_state_dict(model_zoo.load_url(model_urls['resnext50_32x4d_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + raise ValueError('No pretrained model available') return model -def resnext101_32x8d(pretrained=False, filter_size=4, pool_only=True, **kwargs): +def resnext101_32x8d(pretrained=False, filter_size=4, pool_only=True, _force_nonfinetuned=False, **kwargs): model = ResNet(Bottleneck, [3, 4, 23, 3], groups=32, width_per_group=8, filter_size=filter_size, pool_only=pool_only, **kwargs) if pretrained: - raise ValueError('No pretrained model available') - # model.load_state_dict(model_zoo.load_url(model_urls['resnext101_32x8d'])) + if(filter_size==4): + 
model.load_state_dict(model_zoo.load_url(model_urls['resnext101_32x8d_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + raise ValueError('No pretrained model available') return model -def wide_resnet50_2(pretrained=False, filter_size=4, **kwargs): +def wide_resnet50_2(pretrained=False, filter_size=4, _force_nonfinetuned=False, **kwargs): """Wide ResNet-50-2 model from `"Wide Residual Networks" `_ @@ -376,11 +402,13 @@ def wide_resnet50_2(pretrained=False, filter_size=4, **kwargs): """ model = ResNet(Bottleneck, [3, 4, 6, 3], width_per_group=64*2, filter_size=filter_size, **kwargs) if pretrained: - raise ValueError('No pretrained model available') - model.load_state_dict(state_dict) + if(filter_size==4): + model.load_state_dict(model_zoo.load_url(model_urls['wide_resnet50_2_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + raise ValueError('No pretrained model available') return model -def wide_resnet101_2(pretrained=False, filter_size=4, **kwargs): +def wide_resnet101_2(pretrained=False, filter_size=4, _force_nonfinetuned=False, **kwargs): """Wide ResNet-101-2 model from `"Wide Residual Networks" `_ @@ -395,6 +423,8 @@ def wide_resnet101_2(pretrained=False, filter_size=4, **kwargs): """ model = ResNet(Bottleneck, [3, 4, 23, 3], width_per_group=64*2, filter_size=filter_size, **kwargs) if pretrained: - raise ValueError('No pretrained model available') - model.load_state_dict(state_dict) + if(filter_size==4): + model.load_state_dict(model_zoo.load_url(model_urls['wide_resnet101_2_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + raise ValueError('No pretrained model available') return model diff --git a/antialiased_cnns/vgg.py b/antialiased_cnns/vgg.py index 3a57d1b..75e749c 100644 --- a/antialiased_cnns/vgg.py +++ b/antialiased_cnns/vgg.py @@ -56,9 +56,18 @@ 'vgg16_lpf3': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/vgg16_lpf3-e9b0ce42.pth', 'vgg16_lpf4': 
'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/vgg16_lpf4-de9267ac.pth', 'vgg16_lpf5': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/vgg16_lpf5-1391f70c.pth', + 'vgg11_bn_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/vgg11_bn_lpf4_finetune-5d60b5e4.pth', + 'vgg11_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/vgg11_lpf4_finetune-35eab449.pth', + 'vgg13_bn_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/vgg13_bn_lpf4_finetune-45e2a72f.pth', + 'vgg13_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/vgg13_lpf4_finetune-d8ff02c4.pth', + 'vgg16_bn_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/vgg16_bn_lpf4_finetune-1dd798d1.pth', + 'vgg16_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/vgg16_lpf4_finetune-79c9dff7.pth', + 'vgg19_bn_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/vgg19_bn_lpf4_finetune-d0114293.pth', + 'vgg19_lpf4_finetune': 'https://antialiased-cnns.s3.us-east-2.amazonaws.com/weights_v0.1/vgg19_lpf4_finetune-7ab2cf45.pth' } + class VGG(nn.Module): def __init__(self, features, num_classes=1000, init_weights=True): @@ -127,7 +136,7 @@ def make_layers(cfg, batch_norm=False, filter_size=1): 'E': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'], } -def vgg11(pretrained=False, filter_size=4, **kwargs): +def vgg11(pretrained=False, filter_size=4, _force_nonfinetuned=False, **kwargs): """VGG 11-layer model (configuration "A") Args: @@ -137,12 +146,14 @@ def vgg11(pretrained=False, filter_size=4, **kwargs): kwargs['init_weights'] = False model = VGG(make_layers(cfg['A'], filter_size=filter_size), **kwargs) if pretrained: - raise ValueError('No pretrained model available') - # 
model.load_state_dict(model_zoo.load_url(model_urls['vgg11'])) + if(filter_size==4): + model.load_state_dict(model_zoo.load_url(model_urls['vgg11_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + raise ValueError('No pretrained model available') return model -def vgg11_bn(pretrained=False, filter_size=4, **kwargs): +def vgg11_bn(pretrained=False, filter_size=4, _force_nonfinetuned=False, **kwargs): """VGG 11-layer model (configuration "A") with batch normalization Args: @@ -152,12 +163,14 @@ def vgg11_bn(pretrained=False, filter_size=4, **kwargs): kwargs['init_weights'] = False model = VGG(make_layers(cfg['A'], filter_size=filter_size, batch_norm=True), **kwargs) if pretrained: - raise ValueError('No pretrained model available') - # model.load_state_dict(model_zoo.load_url(model_urls['vgg11_bn'])) + if(filter_size==4): + model.load_state_dict(model_zoo.load_url(model_urls['vgg11_bn_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + raise ValueError('No pretrained model available') return model -def vgg13(pretrained=False, filter_size=4, **kwargs): +def vgg13(pretrained=False, filter_size=4, _force_nonfinetuned=False, **kwargs): """VGG 13-layer model (configuration "B") Args: @@ -167,12 +180,14 @@ def vgg13(pretrained=False, filter_size=4, **kwargs): kwargs['init_weights'] = False model = VGG(make_layers(cfg['B'], filter_size=filter_size), **kwargs) if pretrained: - raise ValueError('No pretrained model available') - # model.load_state_dict(model_zoo.load_url(model_urls['vgg13'])) + if(filter_size==4): + model.load_state_dict(model_zoo.load_url(model_urls['vgg13_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + raise ValueError('No pretrained model available') return model -def vgg13_bn(pretrained=False, filter_size=4, **kwargs): +def vgg13_bn(pretrained=False, filter_size=4, _force_nonfinetuned=False, **kwargs): """VGG 13-layer model (configuration "B") with batch normalization 
Args: @@ -182,12 +197,14 @@ def vgg13_bn(pretrained=False, filter_size=4, **kwargs): kwargs['init_weights'] = False model = VGG(make_layers(cfg['B'], filter_size=filter_size, batch_norm=True), **kwargs) if pretrained: - raise ValueError('No pretrained model available') - # model.load_state_dict(model_zoo.load_url(model_urls['vgg13_bn'])) + if(filter_size==4): + model.load_state_dict(model_zoo.load_url(model_urls['vgg13_bn_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + raise ValueError('No pretrained model available') return model -def vgg16(pretrained=False, filter_size=4, **kwargs): +def vgg16(pretrained=False, filter_size=4, _force_nonfinetuned=False, **kwargs): """VGG 16-layer model (configuration "D") Args: @@ -198,12 +215,14 @@ def vgg16(pretrained=False, filter_size=4, **kwargs): kwargs['init_weights'] = False model = VGG(make_layers(cfg['D'], filter_size=filter_size), **kwargs) if pretrained: - # model.load_state_dict(model_zoo.load_url(model_urls['vgg16'])) - model.load_state_dict(model_zoo.load_url(model_urls['vgg16_lpf%i'%filter_size], map_location='cpu', check_hash=True)['state_dict']) + if(filter_size==4 and not _force_nonfinetuned): + model.load_state_dict(model_zoo.load_url(model_urls['vgg16_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + model.load_state_dict(model_zoo.load_url(model_urls['vgg16_lpf%i'%filter_size], map_location='cpu', check_hash=True)['state_dict']) return model -def vgg16_bn(pretrained=False, filter_size=4, **kwargs): +def vgg16_bn(pretrained=False, filter_size=4, _force_nonfinetuned=False, **kwargs): """VGG 16-layer model (configuration "D") with batch normalization Args: @@ -214,12 +233,14 @@ def vgg16_bn(pretrained=False, filter_size=4, **kwargs): kwargs['init_weights'] = False model = VGG(make_layers(cfg['D'], filter_size=filter_size, batch_norm=True), **kwargs) if pretrained: - # model.load_state_dict(model_zoo.load_url(model_urls['vgg16_bn'])) - 
model.load_state_dict(model_zoo.load_url(model_urls['vgg16_bn_lpf%i'%filter_size], map_location='cpu', check_hash=True)['state_dict']) + if(filter_size==4 and not _force_nonfinetuned): + model.load_state_dict(model_zoo.load_url(model_urls['vgg16_bn_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + model.load_state_dict(model_zoo.load_url(model_urls['vgg16_bn_lpf%i'%filter_size], map_location='cpu', check_hash=True)['state_dict']) return model -def vgg19(pretrained=False, filter_size=4, **kwargs): +def vgg19(pretrained=False, filter_size=4, _force_nonfinetuned=False, **kwargs): """VGG 19-layer model (configuration "E") Args: @@ -229,12 +250,14 @@ def vgg19(pretrained=False, filter_size=4, **kwargs): kwargs['init_weights'] = False model = VGG(make_layers(cfg['E'], filter_size=filter_size), **kwargs) if pretrained: - raise ValueError('No pretrained model available') - # model.load_state_dict(model_zoo.load_url(model_urls['vgg19'])) + if(filter_size==4): + model.load_state_dict(model_zoo.load_url(model_urls['vgg19_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + raise ValueError('No pretrained model available') return model -def vgg19_bn(pretrained=False, filter_size=4, **kwargs): +def vgg19_bn(pretrained=False, filter_size=4, _force_nonfinetuned=False, **kwargs): """VGG 19-layer model (configuration 'E') with batch normalization Args: @@ -244,7 +267,9 @@ def vgg19_bn(pretrained=False, filter_size=4, **kwargs): kwargs['init_weights'] = False model = VGG(make_layers(cfg['E'], filter_size=filter_size, batch_norm=True), **kwargs) if pretrained: - raise ValueError('No pretrained model available') - # model.load_state_dict(model_zoo.load_url(model_urls['vgg19_bn'])) + if(filter_size==4): + model.load_state_dict(model_zoo.load_url(model_urls['vgg19_bn_lpf4_finetune'], map_location='cpu', check_hash=True)['state_dict']) + else: + raise ValueError('No pretrained model available') return model