Merge pull request #690 from shashwath94/projadpsftmax_fix

Transformer XL ProjectedAdaptiveLogSoftmax output fix
Thomas Wolf 2019-06-15 23:14:10 +02:00 committed by GitHub
commit 80684f6f86
1 changed file with 2 additions and 2 deletions


@@ -114,10 +114,10 @@ class ProjectedAdaptiveLogSoftmax(nn.Module):
             logit = self._compute_logit(hidden, self.out_layers[0].weight,
                                         self.out_layers[0].bias, self.out_projs[0])
             if target is not None:
-                output = -F.log_softmax(logit, dim=-1) \
+                out = -F.log_softmax(logit, dim=-1) \
                         .gather(1, target.unsqueeze(1)).squeeze(1)
             else:
-                output = F.log_softmax(logit, dim=-1)
+                out = F.log_softmax(logit, dim=-1)
         else:
             # construct weights and biases
             weights, biases = [], []
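
For readers skimming the diff: the fixed branch uses a log-softmax/gather idiom to compute the per-token negative log-likelihood of the gold targets, and the change renames `output` to `out`, which from the hunk alone appears to align this branch with the variable name used elsewhere in the method. Below is a minimal, self-contained sketch, not taken from the repository (tensor shapes, seed, and vocabulary size are made up for illustration), showing that the expression in the diff matches PyTorch's unreduced cross-entropy.

```python
import torch
import torch.nn.functional as F

torch.manual_seed(0)
logit = torch.randn(4, 10)           # [batch, vocab] raw scores
target = torch.randint(0, 10, (4,))  # gold token ids, one per row

# The expression from the diff: log-softmax over the vocab, then pick
# each row's gold-token log-probability and negate it.
out = -F.log_softmax(logit, dim=-1) \
        .gather(1, target.unsqueeze(1)).squeeze(1)

# Sanity check: this is PyTorch's cross-entropy without reduction.
assert torch.allclose(out, F.cross_entropy(logit, target, reduction='none'))
print(out)  # per-token NLL, shape [4]
```

The gather form is handy here because, unlike `F.cross_entropy`, it composes naturally with the adaptive-softmax clustering logic in the surrounding class, where logits for different clusters are computed separately.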