    )(lrelu5)
    add3 = layers.Add()([c6, add2])
    return add3
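All convolutions in the stack use "same" padding, so the time dimension is preserved and the skip connections add cleanly. A quick sanity check (a minimal sketch, assuming the residual_stack definition above and the TensorFlow imports from the top of this example):

x = tf.random.normal((1, 100, 32))   # (batch, time, channels)
y = residual_stack(x, 32)
print(y.shape)                       # (1, 100, 32): length and channels preserved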
Each convolutional block upsamples the input data by the upsampling_factor via a transposed convolution and then applies the dilations offered by the residual stack.
# Dilated convolutional block consisting of the Residual stack


def conv_block(input, conv_dim, upsampling_factor):
    """Dilated Convolutional Block with weight normalization.

    Args:
        input: tensor, input sequence to be upsampled.
        conv_dim: int, determines filter size for the block.
        upsampling_factor: int, scale for upsampling.

    Returns:
        Dilated convolution block.
    """
    conv_t = addon_layers.WeightNormalization(
        layers.Conv1DTranspose(conv_dim, 16, upsampling_factor, padding="same"),
        data_init=False,
    )(input)
    lrelu1 = layers.LeakyReLU()(conv_t)
    res_stack = residual_stack(lrelu1, conv_dim)
    lrelu2 = layers.LeakyReLU()(res_stack)
    return lrelu2
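Since the Conv1DTranspose uses a stride equal to upsampling_factor with "same" padding, the block stretches the time axis by exactly that factor. A minimal sketch of the effect on shapes (assuming the definitions and imports above):

x = tf.random.normal((1, 32, 64))                    # (batch, time, channels)
y = conv_block(x, conv_dim=32, upsampling_factor=8)
print(y.shape)                                       # (1, 256, 32): time axis grew 8x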
The discriminator block consists of convolutions and downsampling layers. This block is essential for the implementation of the feature matching technique.

Each discriminator outputs a list of feature maps that will be compared during training to compute the feature matching loss; a sketch of this comparison follows the code below.
def discriminator_block(input):
    """Discriminator block built from strided, grouped convolutions.

    Returns every intermediate feature map along with the final output so
    that the feature matching loss can compare them during training.
    """
    conv1 = addon_layers.WeightNormalization(
        layers.Conv1D(16, 15, 1, "same"), data_init=False
    )(input)
    lrelu1 = layers.LeakyReLU()(conv1)
    conv2 = addon_layers.WeightNormalization(
        layers.Conv1D(64, 41, 4, "same", groups=4), data_init=False
    )(lrelu1)
    lrelu2 = layers.LeakyReLU()(conv2)
    conv3 = addon_layers.WeightNormalization(
        layers.Conv1D(256, 41, 4, "same", groups=16), data_init=False
    )(lrelu2)
    lrelu3 = layers.LeakyReLU()(conv3)
    conv4 = addon_layers.WeightNormalization(
        layers.Conv1D(1024, 41, 4, "same", groups=64), data_init=False
    )(lrelu3)
    lrelu4 = layers.LeakyReLU()(conv4)
    conv5 = addon_layers.WeightNormalization(
        layers.Conv1D(1024, 41, 4, "same", groups=256), data_init=False
    )(lrelu4)
    lrelu5 = layers.LeakyReLU()(conv5)
    conv6 = addon_layers.WeightNormalization(
        layers.Conv1D(1024, 5, 1, "same"), data_init=False
    )(lrelu5)
    lrelu6 = layers.LeakyReLU()(conv6)
    conv7 = addon_layers.WeightNormalization(
        layers.Conv1D(1, 3, 1, "same"), data_init=False
    )(lrelu6)
    return [lrelu1, lrelu2, lrelu3, lrelu4, lrelu5, lrelu6, conv7]
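To make the comparison concrete, here is a minimal, hypothetical sketch of how two such feature lists could be compared with an L1 distance. The actual feature matching loss used for training is defined later in this example; feature_matching_sketch and its arguments are illustrative names only:

mae = keras.losses.MeanAbsoluteError()


def feature_matching_sketch(real_features, fake_features):
    # Compare every intermediate feature map of real vs. generated audio;
    # the last element (conv7) feeds the adversarial loss instead.
    losses = [mae(r, f) for r, f in zip(real_features[:-1], fake_features[:-1])]
    return tf.reduce_mean(losses)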
Create the generator
def create_generator(input_shape):
    inp = keras.Input(input_shape)
    x = MelSpec()(inp)
    x = layers.Conv1D(512, 7, padding="same")(x)
    x = layers.LeakyReLU()(x)
    x = conv_block(x, 256, 8)
    x = conv_block(x, 128, 8)
    x = conv_block(x, 64, 2)
    x = conv_block(x, 32, 2)
    x = addon_layers.WeightNormalization(
        layers.Conv1D(1, 7, padding="same", activation="tanh")
    )(x)
    return keras.Model(inp, x)
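The four conv_block calls upsample by factors of 8, 8, 2, and 2, which multiply out to 256. Assuming the hop length (frame_step) used by MelSpec earlier in this example is 256, each mel frame is expanded back into one hop's worth of audio samples:

# Cumulative upsampling of the generator should equal the mel hop length,
# so output audio length ≈ number of mel frames × 256.
assert 8 * 8 * 2 * 2 == 256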
# We use a dynamic input shape for the generator since the model is fully convolutional
generator = create_generator((None, 1))
generator.summary()
Model: "model"
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to
==================================================================================================
input_1 (InputLayer)            [(None, None, 1)]    0
__________________________________________________________________________________________________
mel_spec (MelSpec)              (None, None, 80)     0           input_1[0][0]
__________________________________________________________________________________________________
conv1d (Conv1D)                 (None, None, 512)    287232      mel_spec[0][0]
__________________________________________________________________________________________________
leaky_re_lu (LeakyReLU)         (None, None, 512)    0           conv1d[0][0]
__________________________________________________________________________________________________
weight_normalization (WeightNor (None, None, 256)    2097921     leaky_re_lu[0][0]
__________________________________________________________________________________________________
leaky_re_lu_1 (LeakyReLU)       (None, None, 256)    0           weight_normalization[0][0]
__________________________________________________________________________________________________
weight_normalization_1 (WeightN (None, None, 256)    197121      leaky_re_lu_1[0][0]
__________________________________________________________________________________________________
leaky_re_lu_2 (LeakyReLU)       (None, None, 256)    0           weight_normalization_1[0][0]
__________________________________________________________________________________________________
weight_normalization_2 (WeightN (None, None, 256)    197121      leaky_re_lu_2[0][0]
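Because the generator is fully convolutional, the same weights can vocode inputs of arbitrary length. An illustrative check (a sketch, assuming the imports from the top of this example; training=True is passed so that MelSpec applies the spectrogram transform as it does during training):

for n in (8192, 16384):
    audio = tf.random.normal((1, n, 1))
    # The output time axis scales with the input length; the exact value
    # depends on the STFT framing inside MelSpec.
    print(generator(audio, training=True).shape)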