import types

import chainer
import chainer.functions as F
import chainer.links as L


def _normalize(W):
    # Split a weight tensor into a scalar magnitude g and a direction
    # tensor v such that W = g * v.  Note this normalizes over the
    # whole tensor, not per output channel.
    xp = chainer.cuda.get_array_module(W)
    g = xp.sqrt(xp.sum(W ** 2)).reshape((1,))
    v = W / g
    return g, v


def weight_norm(link):
    # Reparameterize the link's weight W as g * v (weight normalization).
    # The parameters g and v replace W, and a LinkHook rebuilds W from
    # them before every forward pass.
    assert hasattr(link, 'W')

    def _W(self):
        return self.v * self.g

    def _remove(self):
        # Fold g and v back into a plain W parameter and unregister the
        # hook so the link behaves like an ordinary link again.
        W = _W(self)
        del self.g
        del self.v
        if hasattr(self, 'W'):  # W exists only if the link has been called
            del self.W
        with self.init_scope():
            self.W = chainer.Parameter(W.array)
        self.delete_hook(hook.name)

    def _replace(args):
        # Called before each forward: renormalize (g, v), which keeps
        # their product unchanged, and restore W for the wrapped link.
        W = _W(args.link)
        g, v = _normalize(W.array)
        args.link.g.array[...] = g
        args.link.v.array[...] = v
        args.link.W = W

    g, v = _normalize(link.W.array)
    del link.W
    with link.init_scope():
        link.g = chainer.Parameter(g)
        link.v = chainer.Parameter(v)

    # Bind _remove as a method so that link.remove() receives the link;
    # assigning the bare function would leave `self` unfilled.
    link.remove = types.MethodType(_remove, link)

    hook = chainer.LinkHook()
    hook.forward_preprocess = _replace
    link.add_hook(hook)
    return link
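

def _demo_weight_norm():
    # Minimal usage sketch, added for illustration; the channel sizes
    # here are arbitrary assumptions.  The wrapped link trains g and v
    # instead of W, and W is rebuilt by the hook on every call.
    import numpy as np
    conv = weight_norm(L.Convolution1D(3, 5, 1))
    x = np.zeros((1, 3, 10), dtype=np.float32)
    y = conv(x)  # forward_preprocess restores conv.W from g and v
    conv.remove()  # fold g and v back into a single W parameter
    assert isinstance(conv.W, chainer.Parameter)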


class Invertible1x1Convolution(chainer.Link):
    def __init__(self, channel):
        super(Invertible1x1Convolution, self).__init__()
        xp = self.xp

        # Initialize W as a random orthogonal matrix (via QR) so that it
        # is invertible and |det W| = 1 at the start of training.
        W = xp.linalg.qr(xp.random.normal(
            0, 1, (channel, channel)))[0].astype(xp.float32)
        W = W.reshape(W.shape + (1,))  # (channel, channel, 1) conv kernel

        with self.init_scope():
            self.W = chainer.Parameter(W)

    @property
    def invW(self):
        return F.expand_dims(F.inv(self.W[..., 0]), axis=2)

    def __call__(self, x):
        # Returns the convolved input together with the log-determinant
        # term batch_size * time_length * log|det W| for the flow loss.
        return F.convolution_1d(x, self.W), \
            x.shape[0] * x.shape[-1] * F.log(F.absolute(F.det(self.W[..., 0])))

    def reverse(self, x):
        return F.convolution_1d(x, self.invW)
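

def _check_invertible_convolution():
    # Sanity sketch, added for illustration: reverse() should undo
    # __call__ up to numerical error.  Shapes are arbitrary assumptions.
    import numpy as np
    conv = Invertible1x1Convolution(4)
    x = np.random.normal(0, 1, (2, 4, 8)).astype(np.float32)
    z, log_det_W = conv(x)
    x_rec = conv.reverse(z)
    assert np.allclose(x, x_rec.array, atol=1e-4)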


class WaveNet(chainer.Chain):
    def __init__(self, out_channel, n_condition, n_layers, n_channel):
        super(WaveNet, self).__init__()
        dilated_convs = chainer.ChainList()
        residual_convs = chainer.ChainList()
        skip_convs = chainer.ChainList()
        condition_convs = chainer.ChainList()
        for i in range(n_layers):
            # Dilation doubles at every layer; pad = dilate keeps the
            # output length equal to the input length for ksize=3.
            dilated_convs.add_link(weight_norm(
                L.Convolution1D(
                    n_channel, 2 * n_channel, 3, pad=2 ** i, dilate=2 ** i)))
            residual_convs.add_link(weight_norm(
                L.Convolution1D(n_channel, n_channel, 1)))
            skip_convs.add_link(weight_norm(
                L.Convolution1D(n_channel, n_channel, 1)))
            condition_convs.add_link(weight_norm(
                L.Convolution1D(n_condition, 2 * n_channel, 1)))
        with self.init_scope():
            self.input_conv = weight_norm(
                L.Convolution1D(out_channel // 2, n_channel, 1))
            self.dilated_convs = dilated_convs
            self.residual_convs = residual_convs
            self.skip_convs = skip_convs
            self.condition_convs = condition_convs
            # Zero initialization makes the coupling layer start as the
            # identity transform (log_s = 0, t = 0).  This conv is not
            # weight-normalized: a zero weight has no direction.
            self.output_conv = L.Convolution1D(
                n_channel, out_channel, 1,
                initialW=chainer.initializers.Zero())

    def __call__(self, x, condition):
        x = self.input_conv(x)
        skip_connection = 0
        for dilated, residual, skip, condition_conv in zip(
                self.dilated_convs, self.residual_convs, self.skip_convs,
                self.condition_convs):
            # Gated activation unit conditioned on the auxiliary features.
            z = dilated(x) + condition_conv(condition)
            z_tanh, z_sigmoid = F.split_axis(z, 2, axis=1)
            z = F.tanh(z_tanh) * F.sigmoid(z_sigmoid)
            # Residual connection back into the main stream.
            x = x + residual(z)
            skip_connection += skip(z)
        y = self.output_conv(skip_connection)
        log_s, t = F.split_axis(y, 2, axis=1)
        return log_s, t
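

def _demo_wavenet():
    # Shape sketch, added for illustration; all sizes are arbitrary
    # assumptions.  For an 8-channel coupling input the encoder sees
    # 4 channels and emits 4-channel log_s and t of the same length.
    import numpy as np
    net = WaveNet(8, 16, 2, 32)
    x = np.zeros((2, 4, 100), dtype=np.float32)
    condition = np.zeros((2, 16, 100), dtype=np.float32)
    log_s, t = net(x, condition)
    assert log_s.shape == t.shape == (2, 4, 100)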


class AffineCouplingLayer(chainer.Chain):
    def __init__(self, *args, **kwargs):
        super(AffineCouplingLayer, self).__init__()
        with self.init_scope():
            self.encoder = WaveNet(*args, **kwargs)

    def __call__(self, x, condition):
        # Split channels in half: x_a passes through unchanged and
        # parameterizes the affine transform applied to x_b.
        x_a, x_b = F.split_axis(x, 2, axis=1)
        log_s, t = self.encoder(x_a, condition)
        x_b = F.exp(log_s) * (x_b + t)
        # F.sum(log_s) is this layer's log-determinant contribution.
        return F.concat((x_a, x_b), axis=1), F.sum(log_s)

    def reverse(self, z, condition):
        # Exact inverse of __call__ given the same condition.
        x_a, x_b = F.split_axis(z, 2, axis=1)
        log_s, t = self.encoder(x_a, condition)
        x_b = x_b * F.exp(-log_s) - t
        return F.concat((x_a, x_b), axis=1)
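

def _check_affine_coupling():
    # Round-trip sketch, added for illustration: reverse() should
    # recover the input of __call__.  Sizes are arbitrary assumptions.
    import numpy as np
    layer = AffineCouplingLayer(8, 16, 2, 32)
    x = np.random.normal(0, 1, (2, 8, 100)).astype(np.float32)
    condition = np.zeros((2, 16, 100), dtype=np.float32)
    z, log_s = layer(x, condition)
    x_rec = layer.reverse(z, condition)
    assert np.allclose(x, x_rec.array, atol=1e-4)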


class Flow(chainer.Chain):
    def __init__(self, channel, n_condition, n_layers, wn_channel):
        super(Flow, self).__init__()
        with self.init_scope():
            self.invertible1x1convolution = Invertible1x1Convolution(
                channel)
            self.affinecouplinglayer = AffineCouplingLayer(
                channel, n_condition, n_layers, wn_channel)

    def __call__(self, x, condition):
        # One flow step: channel-mixing 1x1 convolution followed by an
        # affine coupling layer.  Both log-determinant terms are returned.
        x, log_det_W = self.invertible1x1convolution(x)
        z, log_s = self.affinecouplinglayer(x, condition)
        return z, log_s, log_det_W

    def reverse(self, z, condition):
        # Invert the two sub-transforms in the opposite order.
        z = self.affinecouplinglayer.reverse(z, condition)
        x = self.invertible1x1convolution.reverse(z)
        return x
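

def _check_flow():
    # End-to-end round-trip sketch, added for illustration; sizes are
    # arbitrary assumptions.  One flow step forward and back should
    # reproduce the input up to numerical error.
    import numpy as np
    flow = Flow(8, 16, 2, 32)
    x = np.random.normal(0, 1, (2, 8, 100)).astype(np.float32)
    condition = np.zeros((2, 16, 100), dtype=np.float32)
    z, log_s, log_det_W = flow(x, condition)
    x_rec = flow.reverse(z, condition)
    assert np.allclose(x, x_rec.array, atol=1e-4)


if __name__ == '__main__':
    _demo_weight_norm()
    _check_invertible_convolution()
    _demo_wavenet()
    _check_affine_coupling()
    _check_flow()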