From d0af8fc3dab655c6ad71ce084fd8ccbec946255b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Matt=C3=A9o=20Delabre?=
Date: Sat, 14 Dec 2019 23:03:02 -0500
Subject: [PATCH] Use Tanh activation to preserve negative values

---
 constellation/ConstellationNet.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/constellation/ConstellationNet.py b/constellation/ConstellationNet.py
index 60f5db3..7bcfa4b 100644
--- a/constellation/ConstellationNet.py
+++ b/constellation/ConstellationNet.py
@@ -32,13 +32,12 @@ class ConstellationNet(nn.Module):
 
         for layer_size in encoder_layers_sizes:
             encoder_layers.append(nn.Linear(prev_layer_size, layer_size))
-            encoder_layers.append(nn.ReLU())
+            encoder_layers.append(nn.Tanh())
             prev_layer_size = layer_size
 
         encoder_layers += [
             nn.Linear(prev_layer_size, 2),
-            nn.ReLU(),
-            # TODO: Normalization step
+            nn.Tanh(),
         ]
 
         self.encoder = nn.Sequential(*encoder_layers)
@@ -55,7 +54,7 @@ class ConstellationNet(nn.Module):
 
         for layer_size in decoder_layers_sizes:
             decoder_layers.append(nn.Linear(prev_layer_size, layer_size))
-            decoder_layers.append(nn.ReLU())
+            decoder_layers.append(nn.Tanh())
             prev_layer_size = layer_size
 
         # Softmax is not used at the end of the network because the
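
Note (not part of the patch): a minimal PyTorch sketch of the motivation in the subject line. ReLU clamps negative pre-activations to zero, so an encoder whose final activation is ReLU could never emit a negative value for either of its two output coordinates; Tanh is odd and bounded, so negative values survive. The input tensor below is illustrative only.

    import torch
    import torch.nn as nn

    # Pre-activations with negative components, as they might appear
    # before the encoder's final activation.
    x = torch.tensor([-1.5, -0.2, 0.0, 0.7])

    print(nn.ReLU()(x))  # tensor([0.0000, 0.0000, 0.0000, 0.7000]) -- negatives lost
    print(nn.Tanh()(x))  # tensor([-0.9051, -0.1974, 0.0000, 0.6044]) -- signs kept, range (-1, 1)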