Compatible with torch2.2 #166

Open
wants to merge 2 commits into base: main
62 changes: 47 additions & 15 deletions modules/nsf_hifigan/models.py
@@ -7,13 +7,19 @@
import torch.nn.functional as F
from lightning.pytorch.utilities.rank_zero import rank_zero_info
from torch.nn import Conv1d, ConvTranspose1d
from torch.nn.utils import weight_norm, remove_weight_norm
# from torch.nn.utils import weight_norm, remove_weight_norm

from .env import AttrDict
from .utils import init_weights, get_padding

LRELU_SLOPE = 0.1

# Newer torch provides weight_norm under torch.nn.utils.parametrizations;
# fall back to the legacy torch.nn.utils API where it is unavailable.
_OLD_WEIGHT_NORM = False
try:
    from torch.nn.utils.parametrizations import weight_norm
except ImportError:
    from torch.nn.utils import weight_norm
    from torch.nn.utils import remove_weight_norm
    _OLD_WEIGHT_NORM = True

def load_model(model_path: pathlib.Path):
config_file = model_path.with_name('config.json')
@@ -67,10 +73,17 @@ def forward(self, x):
return x

def remove_weight_norm(self):
for l in self.convs1:
remove_weight_norm(l)
for l in self.convs2:
remove_weight_norm(l)
    global _OLD_WEIGHT_NORM
    if _OLD_WEIGHT_NORM:
        for l in self.convs1:
            remove_weight_norm(l)
        for l in self.convs2:
            remove_weight_norm(l)
    else:
        for l in self.convs1:
            torch.nn.utils.parametrize.remove_parametrizations(l, 'weight')
        for l in self.convs2:
            torch.nn.utils.parametrize.remove_parametrizations(l, 'weight')


class ResBlock2(torch.nn.Module):
@@ -93,8 +106,15 @@ def forward(self, x):
return x

def remove_weight_norm(self):
for l in self.convs:
remove_weight_norm(l)

    global _OLD_WEIGHT_NORM
    if _OLD_WEIGHT_NORM:
        for l in self.convs:
            remove_weight_norm(l)
    else:
        for l in self.convs:
            torch.nn.utils.parametrize.remove_parametrizations(l, 'weight')


class SineGen(torch.nn.Module):
@@ -285,10 +305,22 @@ def forward(self, x, f0):
return x

def remove_weight_norm(self):
rank_zero_info('Removing weight norm...')
for l in self.ups:
remove_weight_norm(l)
for l in self.resblocks:
l.remove_weight_norm()
remove_weight_norm(self.conv_pre)
remove_weight_norm(self.conv_post)
    # rank_zero_info('Removing weight norm...')
    print('Removing weight norm...')
    global _OLD_WEIGHT_NORM
    if _OLD_WEIGHT_NORM:
        for l in self.ups:
            remove_weight_norm(l)
        for l in self.resblocks:
            l.remove_weight_norm()
        remove_weight_norm(self.conv_pre)
        remove_weight_norm(self.conv_post)
    else:
        for l in self.ups:
            torch.nn.utils.parametrize.remove_parametrizations(l, 'weight')
        for l in self.resblocks:
            l.remove_weight_norm()
        torch.nn.utils.parametrize.remove_parametrizations(self.conv_pre, 'weight')
        torch.nn.utils.parametrize.remove_parametrizations(self.conv_post, 'weight')
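
For reference, here is a minimal, standalone sketch of the compatibility pattern this PR applies: import the parametrization-based `weight_norm` when it is available and fall back to the legacy `torch.nn.utils` API otherwise, then strip weight norm through whichever API registered it. The helper name `strip_weight_norm` and the toy `Conv1d` usage are illustrative only (not part of this repository), and the sketch assumes weight norm was registered under the default `'weight'` tensor name.

```python
import torch
from torch.nn import Conv1d

# Newer torch exposes weight_norm as a parametrization; older torch only has
# the legacy torch.nn.utils implementation.
_OLD_WEIGHT_NORM = False
try:
    from torch.nn.utils.parametrizations import weight_norm
    from torch.nn.utils import parametrize
except ImportError:
    from torch.nn.utils import weight_norm, remove_weight_norm
    _OLD_WEIGHT_NORM = True


def strip_weight_norm(module: torch.nn.Module) -> None:
    # Hypothetical helper (not in the repo): remove weight norm from a single
    # module, using whichever API registered it.
    if _OLD_WEIGHT_NORM:
        remove_weight_norm(module)
    else:
        # The parametrization API stores weight norm on the 'weight' tensor.
        parametrize.remove_parametrizations(module, 'weight')


if __name__ == '__main__':
    conv = weight_norm(Conv1d(16, 16, kernel_size=3, padding=1))
    x = torch.randn(1, 16, 100)
    y_before = conv(x)
    strip_weight_norm(conv)
    y_after = conv(x)
    # Removal keeps the effective weights, so the outputs should match.
    print(torch.allclose(y_before, y_after, atol=1e-6))
```

With the parametrization API, `remove_parametrizations` defaults to `leave_parametrized=True`, which bakes the current effective weight back into `module.weight`, so the forward output is unchanged; note that the tensor name (`'weight'` here) is a required argument of that call.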