init batch norm explicit initial values

pull/10/head
erogol 2020-05-05 17:36:12 +02:00
parent 6f5c8773d6
commit d99fda8e42
2 changed files with 2 additions and 2 deletions


@@ -33,7 +33,7 @@ class LinearBN(nn.Module):
         super(LinearBN, self).__init__()
         self.linear_layer = torch.nn.Linear(
             in_features, out_features, bias=bias)
-        self.batch_normalization = nn.BatchNorm1d(out_features)
+        self.batch_normalization = nn.BatchNorm1d(out_features, momentum=0.1, eps=1e-5)
         self._init_w(init_gain)

     def _init_w(self, init_gain):
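
Both hunks pass momentum=0.1 and eps=1e-5 explicitly; these are nn.BatchNorm1d's documented defaults, so the change only spells out the behaviour the layers already had. Below is a minimal sketch of how the full LinearBN module might look with this change applied; only the BatchNorm1d call is taken from the commit, while the _init_w body and the forward pass are assumptions filled in for illustration.

import torch
from torch import nn

class LinearBN(nn.Module):
    # Sketch reconstructed from the hunk context above; not the repo's exact code.
    def __init__(self, in_features, out_features, bias=True, init_gain='linear'):
        super(LinearBN, self).__init__()
        self.linear_layer = torch.nn.Linear(
            in_features, out_features, bias=bias)
        # momentum=0.1 and eps=1e-5 are PyTorch's defaults, now written out explicitly.
        self.batch_normalization = nn.BatchNorm1d(out_features, momentum=0.1, eps=1e-5)
        self._init_w(init_gain)

    def _init_w(self, init_gain):
        # assumed Xavier-uniform init, with the gain picked from the activation name
        torch.nn.init.xavier_uniform_(
            self.linear_layer.weight,
            gain=torch.nn.init.calculate_gain(init_gain))

    def forward(self, x):
        # BatchNorm1d normalizes over dim 1, so a (batch, time, features) input
        # is transposed to (batch, features, time) and back.
        out = self.linear_layer(x)
        if out.dim() == 3:
            out = self.batch_normalization(out.transpose(1, 2)).transpose(1, 2)
        else:
            out = self.batch_normalization(out)
        return out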


@@ -14,7 +14,7 @@ class ConvBNBlock(nn.Module):
             out_channels,
             kernel_size,
             padding=padding)
-        self.batch_normalization = nn.BatchNorm1d(out_channels)
+        self.batch_normalization = nn.BatchNorm1d(out_channels, momentum=0.1, eps=1e-5)
         self.dropout = nn.Dropout(p=0.5)
         if activation == 'relu':
             self.activation = nn.ReLU()
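
A quick self-contained check (an illustrative sketch, not from the repo) that the explicit arguments match nn.BatchNorm1d's defaults, so layers created before and after this commit are configured identically and existing checkpoints keep loading with the same behaviour:

import torch
from torch import nn

explicit = nn.BatchNorm1d(512, momentum=0.1, eps=1e-5)
default = nn.BatchNorm1d(512)
# same hyper-parameters, hence identical running statistics and outputs
assert explicit.eps == default.eps and explicit.momentum == default.momentum

x = torch.randn(4, 512, 100)   # (batch, channels, time); shapes chosen arbitrarily
print(explicit(x).shape)       # torch.Size([4, 512, 100])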