bug in log softmax

Alejandro Moreo Fernandez 2020-05-02 23:29:59 +02:00
parent 0fbbd64b05
commit d6f2f16de1
1 changed file with 4 additions and 2 deletions


@@ -53,7 +53,8 @@ class AuthorshipAttributionClassifier(nn.Module):
  logits = self.forward(xi)
  loss = criterion(logits, torch.as_tensor(yi).to(self.device))
  losses.append(loss.item())
- prediction = tensor2numpy(torch.argmax(nn.functional.log_softmax(logits), dim=1).view(-1))
+ logits = nn.functional.log_softmax(logits, dim=1)
+ prediction = tensor2numpy(torch.argmax(logits, dim=1).view(-1))
  predictions.append(prediction)
  val_loss = np.mean(losses)
  predictions = np.concatenate(predictions)
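
In the validation hunk above, the old line called nn.functional.log_softmax(logits) with no dim argument, which PyTorch reports as deprecated, and the change makes the dim explicit before taking argmax. Since log_softmax is strictly increasing within each row, the predicted class is the same whether argmax is applied to the log-probabilities or to the raw logits. A minimal standalone sketch of that equivalence, assuming only torch and a dummy logits tensor (shapes and values are illustrative, not from the repository):

    import torch
    import torch.nn as nn

    logits = torch.randn(8, 5)                             # (batch, n_classes), dummy values

    log_probs = nn.functional.log_softmax(logits, dim=1)   # explicit dim, no deprecation warning
    # log_softmax is monotonic per row, so argmax over log_probs and over
    # the raw logits select the same class index for every example.
    assert torch.equal(torch.argmax(log_probs, dim=1), torch.argmax(logits, dim=1))
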
@@ -71,7 +72,8 @@ class AuthorshipAttributionClassifier(nn.Module):
  for xi in tqdm(batcher.epoch(x), desc='test'):
      xi = self.padder.transform(xi)
      logits = self.forward(xi)
-     prediction = tensor2numpy(nn.functional.log_softmax(torch.argmax(logits, dim=1).view(-1)))
+     logits = nn.functional.log_softmax(logits, dim=1)
+     prediction = tensor2numpy(torch.argmax(logits, dim=1).view(-1))
      predictions.append(prediction)
  return np.concatenate(predictions)
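
The test-time hunk fixes the actual bug named in the commit message: log_softmax was applied to the output of argmax, i.e. to integer class indices rather than to the per-class logits, so it could not yield per-class log-probabilities. A hedged sketch of the corrected ordering, using a dummy logits tensor (values below are illustrative, not taken from the repository):

    import torch
    import torch.nn as nn

    logits = torch.tensor([[0.2, 1.5, -0.3],
                           [2.1, 0.0,  0.4]])          # (batch, n_classes)

    # Old (buggy) order was roughly:
    #   nn.functional.log_softmax(torch.argmax(logits, dim=1).view(-1))
    # i.e. normalizing the integer class indices, not the class scores.

    # Fixed order: normalize over the class dimension first, then take argmax.
    log_probs = nn.functional.log_softmax(logits, dim=1)
    prediction = torch.argmax(log_probs, dim=1).view(-1)
    print(prediction.numpy())                           # -> [1 0], one class index per example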