From 4bf0c0f363adba35bc839d751854d928d2a6969b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Matt=C3=A9o=20Delabre?=
Date: Mon, 16 Dec 2019 02:35:13 -0500
Subject: [PATCH] Add batch normalization and ReLU activation

---
 constellation/ConstellationNet.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/constellation/ConstellationNet.py b/constellation/ConstellationNet.py
index 39fe417..a2b10c5 100644
--- a/constellation/ConstellationNet.py
+++ b/constellation/ConstellationNet.py
@@ -42,6 +42,8 @@ class ConstellationNet(nn.Module):
 
         for layer_size in encoder_layers_sizes:
             encoder_layers.append(nn.Linear(prev_layer_size, layer_size))
+            encoder_layers.append(nn.ReLU())
+            encoder_layers.append(nn.BatchNorm1d(layer_size))
             prev_layer_size = layer_size
 
         encoder_layers += [
@@ -61,6 +63,8 @@ class ConstellationNet(nn.Module):
 
         for layer_size in decoder_layers_sizes:
             decoder_layers.append(nn.Linear(prev_layer_size, layer_size))
+            decoder_layers.append(nn.ReLU())
+            decoder_layers.append(nn.BatchNorm1d(layer_size))
             prev_layer_size = layer_size
 
         # Softmax is not used at the end of the network because the
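
Note for reviewers: below is a minimal, self-contained sketch of the
hidden-layer pattern this patch introduces, i.e. Linear -> ReLU ->
BatchNorm1d for each hidden layer. The input width, hidden layer
sizes, and batch size are illustrative assumptions, not values taken
from the repository:

    import torch
    import torch.nn as nn

    # Hypothetical sizes: 2 input features (e.g. a 2D constellation
    # point) and two hidden layers of 8 and 4 units.
    layers = []
    prev_layer_size = 2
    for layer_size in [8, 4]:
        layers.append(nn.Linear(prev_layer_size, layer_size))
        layers.append(nn.ReLU())
        layers.append(nn.BatchNorm1d(layer_size))
        prev_layer_size = layer_size

    model = nn.Sequential(*layers)

    # BatchNorm1d normalizes each feature over the batch dimension,
    # so training requires batches of more than one sample; a single
    # sample is fine at inference time after model.eval().
    model.train()
    out = model(torch.randn(16, 2))  # input shape (batch, features)
    print(out.shape)                 # torch.Size([16, 4])

One design note: the patch applies ReLU before BatchNorm1d. The
reverse ordering (normalization before the activation, as in the
original batch normalization paper) is also widely used; both appear
in practice and the choice here follows the patch as written.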