> 对于这题,尝试用 torch 实现交叉熵计算,直接得出数值结果即可。
import torch
import torch.nn.functional as F
import math
# python中打出ln函数: https://blog.csdn.net/qq_45706006/article/details/123004542
def cal_softmax(x, dim=0):
    """Return the softmax probabilities of *x* along dimension *dim*.

    *x* may be any sequence accepted by ``torch.tensor``; it is converted
    to a float tensor before ``F.softmax`` is applied.
    """
    logits = torch.tensor(x, dtype=torch.float)
    return F.softmax(logits, dim=dim)
def cal_cross_entropy_loss(x, target):
    """Compute and print the multi-class cross-entropy loss for one sample.

    Args:
        x: sequence of raw (un-normalized) logits, one per class.
        target: LongTensor of shape (1,) holding the true class index.

    Returns:
        Scalar tensor with the cross-entropy loss.

    Bug fix: the original code applied ``F.softmax`` to ``x`` and then
    passed the resulting *probabilities* to ``F.cross_entropy``.
    ``F.cross_entropy`` applies ``log_softmax`` internally, so the
    probabilities went through softmax twice and the printed loss was
    wrong. The logits must be passed in raw.
    """
    logits = torch.tensor(x, dtype=torch.float)
    # F.cross_entropy expects a batch dimension: (N, C) vs target (N,).
    logits = torch.unsqueeze(logits, 0)
    loss = F.cross_entropy(logits, target)
    # Print the class probabilities alongside the loss for inspection.
    print(F.softmax(logits, dim=1), loss)
    return loss
if __name__ == '__main__':
    # Multi-class cross-entropy examples. Logits are chosen as ln(k) so
    # that softmax yields round probabilities (e.g. 0.1/0.2/0.3/0.4).
    target = torch.tensor([3], dtype=torch.long)
    example_logits = (
        [math.log(20), math.log(40), math.log(60), math.log(80)],
        [math.log(10), math.log(30), math.log(50), math.log(90)],
    )
    for logits in example_logits:
        cal_cross_entropy_loss(logits, target)
    # Bug note: passing a 1-D tensor (no batch dim) to F.cross_entropy raises
    # "IndexError: Dimension out of range (expected to be in range of [-1, 0], but got 1)"
    # https://www.freesion.com/article/9306918693/
参考资料:
- 果壳深度学习2020试卷
- 卷积神经网络系列之softmax,softmax loss和cross entropy的讲解
- python中打出ln函数
- PYTORCH 损失函数 INDEXERROR: DIMENSION OUT OF RANGE (EXPECTED TO BE IN RANGE OF [-1, 0], BUT GOT 1)



