From c32d9da5676cf35cd5316e4552ffe220a482f224 Mon Sep 17 00:00:00 2001
From: Alex Moreo <alejandro.moreo@isti.cnr.it>
Date: Wed, 8 Jul 2020 12:13:35 +0200
Subject: [PATCH] add activate_last flag to FFProjection; enable it in CNNProjection
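
FFProjection used to apply the activation (and dropout) to every layer
except the last one, whose output was returned raw. This patch adds an
activate_last flag (default False, which keeps the old behaviour) that,
when set, runs the output layer through the activation and dropout as
well. CNNProjection now builds its fully-connected projection with
activate_last=True, so the vector it L2-normalizes is ReLU-activated.
Also removes two commented-out lines and a stray blank line.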

---
 src/model/classifiers.py     |  1 -
 src/model/transformations.py | 17 ++++++++++-------
 2 files changed, 10 insertions(+), 8 deletions(-)

diff --git a/src/model/classifiers.py b/src/model/classifiers.py
index b2c0515..3e757f9 100644
--- a/src/model/classifiers.py
+++ b/src/model/classifiers.py
@@ -5,7 +5,6 @@ from sklearn.metrics import accuracy_score, f1_score
 from tqdm import tqdm
 import math
 from sklearn.model_selection import train_test_split
-
 from model.early_stop import EarlyStop
 from model.transformations import FFProjection
 
diff --git a/src/model/transformations.py b/src/model/transformations.py
index 4380213..6d0911e 100644
--- a/src/model/transformations.py
+++ b/src/model/transformations.py
@@ -19,7 +19,8 @@ class CNNProjection(nn.Module):
                                hidden_sizes=[1024],
                                output_size=out_size,
                                activation=nn.functional.relu,
-                               dropout=dropout)
+                               dropout=dropout,
+                               activate_last=True)
         self.output_size = out_size
 
     def convolve(self, x):
@@ -42,8 +43,6 @@ class CNNProjection(nn.Module):
         x = self.embed(x)  # (N, W, D)
         x = self.convolve(x)  # (N, len(Ks)*Co)
         x = self.fc(x)
-        #x = F.relu(self.fc1(x))  # (N, C)
-        # x = self.dropout(x)
         x = self.l2norm(x)
         return x
 
@@ -52,7 +51,8 @@ class CNNProjection(nn.Module):
 
 
 class FFProjection(nn.Module):
-    def __init__(self, input_size, hidden_sizes, output_size, activation=nn.functional.relu, dropout=0.5):
+    def __init__(self, input_size, hidden_sizes, output_size, activation=nn.functional.relu, dropout=0.5,
+                 activate_last=False):
         super(FFProjection, self).__init__()
         sizes = [input_size] + hidden_sizes + [output_size]
         self.ff = nn.ModuleList([
@@ -60,11 +60,14 @@ class FFProjection(nn.Module):
         ])
         self.activation = activation
         self.dropout = nn.Dropout(p=dropout)
+        self.activate_last = activate_last
 
     def forward(self, x):
-        for linear in self.ff[:-1]:
-            x = self.dropout(self.activation(linear(x)))
-        x = self.ff[-1](x)
+        last_layer_idx = len(self.ff) - 1
+        for i, linear in enumerate(self.ff):
+            x = linear(x)
+            if i < last_layer_idx or self.activate_last:  # hidden layers always; the last layer only on demand
+                x = self.dropout(self.activation(x))
         return x
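
A minimal usage sketch of the new flag (not part of the patch; the
dimensions below are made up, and it assumes src/ is on PYTHONPATH):

    import torch
    import torch.nn as nn
    from model.transformations import FFProjection

    x = torch.randn(8, 300)

    # default (activate_last=False): the last Linear layer's output is
    # returned raw, exactly as before this patch
    plain = FFProjection(input_size=300, hidden_sizes=[1024], output_size=128)
    y = plain(x)  # entries may be negative

    # activate_last=True (how CNNProjection now builds its projection):
    # ReLU and dropout are applied after the last Linear layer as well
    act = FFProjection(input_size=300, hidden_sizes=[1024], output_size=128,
                       activation=nn.functional.relu, dropout=0.5,
                       activate_last=True)
    z = act(x)
    assert (z >= 0).all()  # ReLU is the final non-linearity

In CNNProjection this means the features handed to l2norm are now
ReLU-activated (with dropout applied to them during training).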