mirror of https://github.com/coqui-ai/TTS.git
init batch norm explicit initial values
parent 6f5c8773d6
commit d99fda8e42
@@ -33,7 +33,7 @@ class LinearBN(nn.Module):
         super(LinearBN, self).__init__()
         self.linear_layer = torch.nn.Linear(
             in_features, out_features, bias=bias)
-        self.batch_normalization = nn.BatchNorm1d(out_features)
+        self.batch_normalization = nn.BatchNorm1d(out_features, momentum=0.1, eps=1e-5)
         self._init_w(init_gain)
 
     def _init_w(self, init_gain):
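For context, a minimal sketch of the LinearBN module as it would look after this change. Only the constructor lines shown in the hunk come from the diff; the constructor signature and the body of _init_w are assumptions filled in for illustration. Note that momentum=0.1 and eps=1e-5 are the PyTorch defaults for nn.BatchNorm1d, so the change only makes the existing behavior explicit.

import torch
from torch import nn


class LinearBN(nn.Module):
    """Linear layer followed by 1D batch normalization (sketch)."""

    def __init__(self, in_features, out_features, bias=True, init_gain='linear'):
        super(LinearBN, self).__init__()
        self.linear_layer = torch.nn.Linear(
            in_features, out_features, bias=bias)
        # Explicit momentum/eps match the nn.BatchNorm1d defaults, so numerics are unchanged.
        self.batch_normalization = nn.BatchNorm1d(out_features, momentum=0.1, eps=1e-5)
        self._init_w(init_gain)

    def _init_w(self, init_gain):
        # Assumed initializer: Xavier-uniform scaled by the gain of the configured nonlinearity.
        torch.nn.init.xavier_uniform_(
            self.linear_layer.weight,
            gain=torch.nn.init.calculate_gain(init_gain))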
@@ -14,7 +14,7 @@ class ConvBNBlock(nn.Module):
             out_channels,
             kernel_size,
             padding=padding)
-        self.batch_normalization = nn.BatchNorm1d(out_channels)
+        self.batch_normalization = nn.BatchNorm1d(out_channels, momentum=0.1, eps=1e-5)
         self.dropout = nn.Dropout(p=0.5)
         if activation == 'relu':
             self.activation = nn.ReLU()
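Likewise, a sketch of the ConvBNBlock constructor after the change, with the hunk lines as the only ground truth; the attribute name convolution1d, the padding computation, and the non-ReLU fallback branch are assumptions for illustration. As above, the explicit batch-norm arguments equal the library defaults.

from torch import nn


class ConvBNBlock(nn.Module):
    """1D convolution -> batch norm -> dropout -> activation (sketch)."""

    def __init__(self, in_channels, out_channels, kernel_size, activation=None):
        super(ConvBNBlock, self).__init__()
        # Assumed 'same'-style padding for an odd kernel size.
        padding = (kernel_size - 1) // 2
        self.convolution1d = nn.Conv1d(
            in_channels,
            out_channels,
            kernel_size,
            padding=padding)
        # Explicit momentum/eps match the nn.BatchNorm1d defaults (momentum=0.1, eps=1e-5).
        self.batch_normalization = nn.BatchNorm1d(out_channels, momentum=0.1, eps=1e-5)
        self.dropout = nn.Dropout(p=0.5)
        if activation == 'relu':
            self.activation = nn.ReLU()
        else:
            # Assumed pass-through when no activation is requested.
            self.activation = nn.Identity()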