mxnet CrossEntropy test
【Abstract】A quick cross-entropy test in MXNet: the same zero-filled predictions and labels are fed to mxnet.gluon.loss.SoftmaxCrossEntropyLoss and, after a hand-written softmax, to mx.metric.CrossEntropy, followed by short notes on common softmax-regression helper functions.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
# import sys
# import math

import mxnet as mx
import mxnet.gluon.loss as gloss

import numpy as np

# Two samples, three classes: all-zero logits and all-zero labels (class 0).
a = np.zeros((2, 3))
b = np.zeros((2, 1))
pred = mx.nd.array(a)
label = mx.nd.array(b)

# Gluon's loss takes raw logits and applies softmax internally.
loss = gloss.SoftmaxCrossEntropyLoss()
print("loss", loss(pred, label).asnumpy())
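For the all-zero inputs above the expected value is easy to check by hand: softmax over three zero logits gives uniform probabilities of 1/3, so the cross-entropy of class 0 is -ln(1/3) ≈ 1.0986 for every sample, and that is the value SoftmaxCrossEntropyLoss should print. A minimal NumPy reference computation (not part of the original post, mirroring the arrays a and b above):

import numpy as np

logits = np.zeros((2, 3))                                  # same shape as `a` above
targets = np.zeros(2, dtype=np.int64)                      # class 0, like `b` above
exp = np.exp(logits - logits.max(axis=1, keepdims=True))   # numerically stable softmax
probs = exp / exp.sum(axis=1, keepdims=True)               # every row is [1/3, 1/3, 1/3]
ref = -np.log(probs[np.arange(len(targets)), targets])
print("reference", ref)                                    # ≈ [1.0986123 1.0986123]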
def _softmax(x):
    """Row-wise softmax, written out by hand."""
    step1 = x.exp()
    step2 = step1.sum(axis=1, keepdims=True)
    return step1 / step2


# mx.metric.CrossEntropy expects predicted probabilities rather than logits,
# so the hand-written softmax is applied to pred first.
ce = mx.metric.CrossEntropy()
ce.update(label, _softmax(pred))
loss = ce.get()

print(loss)
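The two APIs differ in two ways: SoftmaxCrossEntropyLoss consumes raw logits and returns one loss value per sample, while mx.metric.CrossEntropy consumes already-normalized probabilities (hence the manual _softmax) and keeps a running average over every update() call. A small self-contained sketch of the metric's reset/update/get cycle, using uniform probabilities and zero labels that mirror the arrays above (the three-iteration loop is purely illustrative):

import mxnet as mx

probs = mx.nd.full((2, 3), 1.0 / 3)   # uniform probabilities, as the zero logits produce
target = mx.nd.zeros((2, 1))          # class 0 for both samples

ce = mx.metric.CrossEntropy()
ce.reset()
for _ in range(3):                    # e.g. three identical mini-batches
    ce.update(target, probs)
name, value = ce.get()                # running average over all updates
print(name, value)                    # cross-entropy ≈ 1.0986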
Notes on common functions
def softmax(o):
    """Softmax function."""
    o_exp = o.exp()
    return o_exp / o_exp.sum(axis=1, keepdims=True)


def net(x):
    """Network model."""
    # The original snippet is cut off at this point.
    return softmax(nd.dot(x.reshape((-1, num_pixe
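The last line is cut off in the original post. As a rough, assumption-laden completion (not the author's code): num_pixels would be the flattened input size and w, b the parameters of a single linear layer, which turns the two helpers into a minimal softmax-regression net:

from mxnet import nd

# Assumed names: num_pixels, num_classes, w and b do not appear in the original post.
num_pixels = 28 * 28                          # e.g. flattened 28x28 images
num_classes = 10
w = nd.random.normal(scale=0.01, shape=(num_pixels, num_classes))
b = nd.zeros(num_classes)


def softmax(o):
    """Row-wise softmax, as defined in the notes above."""
    o_exp = o.exp()
    return o_exp / o_exp.sum(axis=1, keepdims=True)


def net(x):
    """Flatten the input, apply one linear layer, then softmax."""
    return softmax(nd.dot(x.reshape((-1, num_pixels)), w) + b)


x = nd.random.uniform(shape=(2, 1, 28, 28))   # a dummy batch of two "images"
print(net(x).sum(axis=1))                     # every row sums to 1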
Article source: blog.csdn.net, author: 网奇. Copyright belongs to the original author; please contact the author for reprint permission.
Original link: blog.csdn.net/jacke121/article/details/116714213