NaN after softmax

>>> import torch
>>> import torch.nn.functional as F
>>> F.softmax(torch.Tensor([0, float('-inf')]), -1)
tensor([ 1.0000, 0.0000])
>>> F.softmax(torch.Tensor([0, float('inf')]), -1) # should give [0.0, 1.0]
tensor([ nan, nan])
>>> F.log_softmax(torch.Tensor([0, float('-inf')]), -1)
tensor([ 0.0000, -inf])
>>> F.log_softmax(torch.Tensor([0, float('inf')]), -1)
tensor([ nan, nan])
>>> F.softmax(torch.Tensor([float('-inf'), 0, float('-inf')]), -1)
tensor([ 0.0000, 1.0000, 0.0000])
>>> F.softmax(torch.Tensor([0, float('inf'), 0]), -1) # should give [0.0, 1.0, 0.0]
tensor([ nan, nan, nan])
>>> F.softmax(torch.Tensor([float('-inf'), 0, float('inf')]), -1) # should give [0.0, 0.0, 1.0]
tensor([ nan, nan, nan])