Add batch normalization and ReLU activation

Mattéo Delabre 2019-12-16 02:35:13 -05:00
parent 8fa6b46ca8
commit 4bf0c0f363
Signed by: matteo
GPG Key ID: AE3FBD02DC583ABB
1 changed file with 4 additions and 0 deletions


@@ -42,6 +42,8 @@ class ConstellationNet(nn.Module):
         for layer_size in encoder_layers_sizes:
             encoder_layers.append(nn.Linear(prev_layer_size, layer_size))
+            encoder_layers.append(nn.ReLU())
+            encoder_layers.append(nn.BatchNorm1d(layer_size))
             prev_layer_size = layer_size
         encoder_layers += [
@@ -61,6 +63,8 @@ class ConstellationNet(nn.Module):
         for layer_size in decoder_layers_sizes:
             decoder_layers.append(nn.Linear(prev_layer_size, layer_size))
+            decoder_layers.append(nn.ReLU())
+            decoder_layers.append(nn.BatchNorm1d(layer_size))
             prev_layer_size = layer_size
         # Softmax is not used at the end of the network because the
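For context, the added lines follow the common Linear -> ReLU -> BatchNorm1d pattern built in a loop. Below is a minimal, self-contained sketch of that pattern; the sizes, the nn.Sequential wrapper, and every name not visible in the diff are assumptions made only to keep the sketch runnable, not code taken from this repository.

import torch
import torch.nn as nn

# Hypothetical sizes, chosen only for illustration
prev_layer_size = 4
encoder_layers_sizes = [8, 8]

encoder_layers = []
for layer_size in encoder_layers_sizes:
    # Same ordering as in the commit: affine layer, activation,
    # then batch normalization over the feature dimension
    encoder_layers.append(nn.Linear(prev_layer_size, layer_size))
    encoder_layers.append(nn.ReLU())
    encoder_layers.append(nn.BatchNorm1d(layer_size))
    prev_layer_size = layer_size

encoder = nn.Sequential(*encoder_layers)
out = encoder(torch.randn(16, 4))  # batch of 16 input vectors of width 4
print(out.shape)                   # torch.Size([16, 8])

Note that nn.BatchNorm1d needs more than one sample per batch in training mode to compute batch statistics, which is why the sketch feeds a batch of 16.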