Add batch normalization and ReLU activation
commit 4bf0c0f363
parent 8fa6b46ca8
@@ -42,6 +42,8 @@ class ConstellationNet(nn.Module):
         for layer_size in encoder_layers_sizes:
             encoder_layers.append(nn.Linear(prev_layer_size, layer_size))
+            encoder_layers.append(nn.ReLU())
+            encoder_layers.append(nn.BatchNorm1d(layer_size))
             prev_layer_size = layer_size

         encoder_layers += [
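The commit applies the same Linear → ReLU → BatchNorm1d ordering to every hidden layer; nn.BatchNorm1d takes the width of the preceding Linear layer, since it normalizes each of those features over the batch. A minimal, self-contained sketch of one such block (the sizes here are made-up placeholders, not values from this repository):

import torch
import torch.nn as nn

# Hypothetical sizes for illustration only; the real widths come from
# encoder_layers_sizes in ConstellationNet.
prev_layer_size, layer_size = 2, 8
block = nn.Sequential(
    nn.Linear(prev_layer_size, layer_size),
    nn.ReLU(),
    nn.BatchNorm1d(layer_size),
)
x = torch.randn(16, prev_layer_size)  # batch of 16 samples
print(block(x).shape)                 # torch.Size([16, 8])

Note that nn.BatchNorm1d requires batches of more than one sample while the module is in training mode.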
@@ -61,6 +63,8 @@ class ConstellationNet(nn.Module):
         for layer_size in decoder_layers_sizes:
             decoder_layers.append(nn.Linear(prev_layer_size, layer_size))
+            decoder_layers.append(nn.ReLU())
+            decoder_layers.append(nn.BatchNorm1d(layer_size))
             prev_layer_size = layer_size

         # Softmax is not used at the end of the network because the
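Taken together, the two loops build mirrored encoder and decoder stacks. The sketch below shows how a constructor might assemble them into nn.Sequential modules; the constructor signature, the default sizes, and the final output layer are assumptions for illustration and are not part of this commit:

import torch.nn as nn

class ConstellationNet(nn.Module):
    # Sketch only: the real constructor signature is not shown in this diff.
    def __init__(self, input_size=2, encoder_layers_sizes=(8, 4),
                 decoder_layers_sizes=(8,)):
        super().__init__()

        # Encoder: Linear -> ReLU -> BatchNorm1d for each hidden width.
        encoder_layers, prev_layer_size = [], input_size
        for layer_size in encoder_layers_sizes:
            encoder_layers += [nn.Linear(prev_layer_size, layer_size),
                               nn.ReLU(), nn.BatchNorm1d(layer_size)]
            prev_layer_size = layer_size
        self.encoder = nn.Sequential(*encoder_layers)

        # Decoder mirrors the encoder; no Softmax on the output
        # (see the truncated comment above), so a plain Linear head
        # is assumed here.
        decoder_layers = []
        for layer_size in decoder_layers_sizes:
            decoder_layers += [nn.Linear(prev_layer_size, layer_size),
                               nn.ReLU(), nn.BatchNorm1d(layer_size)]
            prev_layer_size = layer_size
        decoder_layers.append(nn.Linear(prev_layer_size, input_size))
        self.decoder = nn.Sequential(*decoder_layers)

    def forward(self, x):
        return self.decoder(self.encoder(x))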