Remove activation function from ConstellationNet

Mattéo Delabre 2019-12-15 19:22:35 -05:00
parent 989a51b72e
commit 3c199dfc41
Signed by: matteo
GPG Key ID: AE3FBD02DC583ABB
1 changed file with 0 additions and 2 deletions


@@ -39,7 +39,6 @@ class ConstellationNet(nn.Module):
         for layer_size in encoder_layers_sizes:
             encoder_layers.append(nn.Linear(prev_layer_size, layer_size))
-            encoder_layers.append(nn.SELU())
             prev_layer_size = layer_size
 
         encoder_layers += [
@@ -59,7 +58,6 @@ class ConstellationNet(nn.Module):
         for layer_size in decoder_layers_sizes:
             decoder_layers.append(nn.Linear(prev_layer_size, layer_size))
-            decoder_layers.append(nn.SELU())
             prev_layer_size = layer_size
 
         # Softmax is not used at the end of the network because the
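For context, a minimal sketch of what the layer construction looks like after this commit: the encoder and decoder are now stacks of plain nn.Linear layers with no SELU in between. Only the two loops and the identifiers encoder_layers_sizes, decoder_layers_sizes and prev_layer_size come from the diff; the constructor signature, the order/channel_dim arguments, the final projection layers and the nn.Sequential wrapping are assumptions made to keep the snippet self-contained.

    import torch.nn as nn

    class ConstellationNet(nn.Module):
        # Hypothetical signature: order and channel_dim are assumed, not taken
        # from the diff.
        def __init__(self, order=4, channel_dim=2,
                     encoder_layers_sizes=(8,), decoder_layers_sizes=(8,)):
            super().__init__()

            # Encoder: linear layers only, no activation between them anymore
            encoder_layers = []
            prev_layer_size = order
            for layer_size in encoder_layers_sizes:
                encoder_layers.append(nn.Linear(prev_layer_size, layer_size))
                prev_layer_size = layer_size
            # Assumed final projection down to the channel dimension
            encoder_layers.append(nn.Linear(prev_layer_size, channel_dim))
            self.encoder = nn.Sequential(*encoder_layers)

            # Decoder: same pattern; softmax is deliberately omitted, as the
            # original comment in the file notes
            decoder_layers = []
            prev_layer_size = channel_dim
            for layer_size in decoder_layers_sizes:
                decoder_layers.append(nn.Linear(prev_layer_size, layer_size))
                prev_layer_size = layer_size
            # Assumed final layer producing one logit per constellation symbol
            decoder_layers.append(nn.Linear(prev_layer_size, order))
            self.decoder = nn.Sequential(*decoder_layers)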