From 6d31ab3a13d91a4ba5779ccd91ffb55619d788c7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Matt=C3=A9o=20Delabre?=
Date: Sun, 15 Dec 2019 00:03:20 -0500
Subject: [PATCH] Use ReLU with normalization

---
 constellation/ConstellationNet.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/constellation/ConstellationNet.py b/constellation/ConstellationNet.py
index 299bcb7..7f984ff 100644
--- a/constellation/ConstellationNet.py
+++ b/constellation/ConstellationNet.py
@@ -36,12 +36,13 @@ class ConstellationNet(nn.Module):
 
         for layer_size in encoder_layers_sizes:
             encoder_layers.append(nn.Linear(prev_layer_size, layer_size))
-            encoder_layers.append(nn.Tanh())
+            encoder_layers.append(nn.ReLU())
             prev_layer_size = layer_size
 
         encoder_layers += [
             nn.Linear(prev_layer_size, 2),
-            nn.Tanh(),
+            nn.ReLU(),
+            nn.BatchNorm1d(2),
         ]
 
         self.encoder = nn.Sequential(*encoder_layers)
@@ -56,7 +57,7 @@ class ConstellationNet(nn.Module):
 
         for layer_size in decoder_layers_sizes:
             decoder_layers.append(nn.Linear(prev_layer_size, layer_size))
-            decoder_layers.append(nn.Tanh())
+            decoder_layers.append(nn.ReLU())
             prev_layer_size = layer_size
 
         # Softmax is not used at the end of the network because the
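
Note (outside the patch): a minimal sketch of the encoder stack this change produces, for a hypothetical 4-symbol one-hot input and a single hidden layer of size 8. The layer sizes and variable names below are illustrative assumptions, not values taken from the repository; only the ReLU activations and the trailing `BatchNorm1d(2)` come from the patch itself.

    import torch
    import torch.nn as nn

    # Assumed sizes: 4 input symbols (one-hot), one hidden layer of 8 units.
    encoder = nn.Sequential(
        nn.Linear(4, 8),
        nn.ReLU(),           # ReLU replaces Tanh (this patch)
        nn.Linear(8, 2),     # map to the 2D constellation plane
        nn.ReLU(),           # ReLU replaces Tanh (this patch)
        nn.BatchNorm1d(2),   # normalize the constellation points (added by this patch)
    )

    # BatchNorm1d needs a batch of more than one sample in training mode.
    symbols = torch.eye(4)       # batch of 4 one-hot symbols
    points = encoder(symbols)    # shape: (4, 2) constellation points
    print(points.shape)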