Commit d952db19 by xiaotong

add LogSoftmax

parent 8703cf97
......@@ -20,9 +20,10 @@
*/
#include <math.h>
#include "../XUtility.h"
#include "LogSoftmax.h"
#include "LogSoftmax.cuh"
#include "../XName.h"
#include "../XUtility.h"
#include "../core/reduce/ReduceSum.h"
#include "../core/reduce/ReduceMax.h"
#include "../core/movement/CopyValues.h"
......@@ -160,6 +161,25 @@ void _LogSoftmax(const XTensor * x, XTensor * y, int leadDim)
ShowNTErrors("TODO!");
}
/*
log-scale softmax: y = log(e^x / \sum_{i} e^{x_i}) (returns an XTensor by value)
>> x - the input tensor
>> leadDim - the dimension along which the softmax normalization is performed
<< return - the resulting tensor, flagged as temporary
*/
XTensor LogSoftmax(const XTensor &x, int leadDim)
{
    /* allocate the output with the same shape/type/device as the input
       and mark it as a temporary node of the computation graph */
    XTensor result(&x);
    result.SetTMP();

    /* delegate the actual computation to the low-level routine */
    _LogSoftmax(&x, &result, leadDim);

    /* record the operation so that backpropagation can traverse it,
       keeping leadDim as an operation parameter */
    XLink::MakeLink(&x, NULL, &result, FUNC_LOGSOFTMAX);
    XLink::AddParamToHeadInt(&result, leadDim);

    return result;
}
/*
backward computation for dense matrices with default data type
......
......@@ -30,6 +30,9 @@ namespace nts{ // namespace nts(NiuTrans.Tensor)
/* log scale softmax y = log(e^x / \sum_{i} e^{x_i}) */
void _LogSoftmax(const XTensor * x, XTensor * y, int leadDim);
/* log scale softmax y = log(e^x / \sum_{i} e^{x_i}) (return a structure) */
XTensor LogSoftmax(const XTensor &x, int leadDim);
/* de/dx */
void _LogSoftmaxBackward(XTensor * gold, XTensor * y, XTensor * x,
XTensor * dedy, XTensor * dedx,
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论