Commit 48bdcb49 by liyinqiao

Bug fixes and code cleanup.

1. Fixed bugs in the backward pass (gradient checks now account for efficient mode and for NOLOSS).
2. Fixed minor errors (the misspelled isEfficient parameter).
parent ca1f1843
@@ -34,7 +34,14 @@ void XFuncGrad::MakeGrad(XTensor * node, bool isEfficient)
     XLink &income = node->income;
     int operID = income.typeID;
-    CheckNTErrors(node->grad != NULL, "No gradient found!");
+    if(!isEfficient){
+        CheckNTErrors(node->grad != NULL, "No gradient found!");
+    }
+    else{
+        CheckNTErrors(!node->isGrad || node->grad != NULL, "No gradient found!");
+    }
+    //CheckNTErrors(node->grad != NULL, "No gradient found!");
     CheckNTErrors(income.tailNum == 1, "Too many input tensors for the function!");
     XTensor * input = income.tails[0];
......
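Note on the check itself: in efficient (memory-saving) backward mode, a node carries a gradient tensor only if it actually needs one, so grad may legitimately be NULL when isGrad is false. The new assertion encodes the implication "isGrad implies grad != NULL". Below is a minimal, self-contained sketch of that invariant; the Node struct and CheckGrad function are illustrative stand-ins, not the real XTensor API.

#include <cassert>
#include <cstddef>

// Illustrative stand-in for XTensor; only the fields relevant to the check.
struct Node {
    bool   isGrad;   // does this node participate in gradient computation?
    float *grad;     // gradient buffer; may be NULL when isGrad is false
};

// Mirrors the fixed check: strict in normal mode, an implication in efficient mode.
void CheckGrad(const Node &node, bool isEfficient)
{
    if (!isEfficient)
        assert(node.grad != NULL);                  // every node must own a gradient
    else
        assert(!node.isGrad || node.grad != NULL);  // only gradient-bearing nodes must
}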
@@ -32,33 +32,38 @@
 namespace nts{
 /* compute dE/dx of a node */
-void XShapeGrad::MakeGrad(XTensor * node, bool isEfficent)
+void XShapeGrad::MakeGrad(XTensor * node, bool isEfficient)
 {
-    CheckNTErrors(node->grad != NULL, "No gradient found!");
+    if(!isEfficient){
+        CheckNTErrors(node->grad != NULL, "No gradient found!");
+    }
+    else{
+        CheckNTErrors(!node->isGrad || node->grad != NULL, "No gradient found!");
+    }
     XLink &income = node->income;
     int operID = income.typeID;
     if(operID == MOVEMENT_COPYINDEXED)
-        GradCopyIndexed(node, isEfficent);
+        GradCopyIndexed(node, isEfficient);
     else if(operID == MOVEMENT_GATHER)
-        GradGather(node, isEfficent);
+        GradGather(node, isEfficient);
     else if (operID == MOVEMENT_DROPOUTWITHINDEX)
-        GradDropoutWithIndex(node, isEfficent);
+        GradDropoutWithIndex(node, isEfficient);
     else if(operID == SHAPE_MERGE)
-        GradMerge(node, isEfficent);
+        GradMerge(node, isEfficient);
     else if(operID == SHAPE_MERGE_LIST)
-        GradMergeList(node, isEfficent);
+        GradMergeList(node, isEfficient);
     else if(operID == SHAPE_RESHAPE)
-        GradReshape(node, isEfficent);
+        GradReshape(node, isEfficient);
     else if(operID == SHAPE_SPLIT)
-        GradSplit(node, isEfficent);
+        GradSplit(node, isEfficient);
     else if(operID == SHAPE_SPLIT_LIST)
-        GradSplitList(node, isEfficent);
+        GradSplitList(node, isEfficient);
     else if (operID == SHAPE_TRANSPOSE)
-        GradTranspose(node, isEfficent);
+        GradTranspose(node, isEfficient);
     else if(operID == SHAPE_UNSQUEEZE)
-        GradUnsqueeze(node, isEfficent);
+        GradUnsqueeze(node, isEfficient);
     else{
         ShowNTErrors("TODO!");
     }
......
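The routing above selects the gradient routine for each shape or movement operation by its type ID. The same dispatch could be expressed as a lookup table; the sketch below is only an illustration with hypothetical types and signatures, not code from the repository.

#include <unordered_map>

struct XTensor;  // forward declaration standing in for the real class

typedef void (*GradFunc)(XTensor *node, bool isEfficient);

// Hypothetical table-based dispatch mirroring the if/else chain above.
void DispatchGrad(int operID, XTensor *node, bool isEfficient,
                  const std::unordered_map<int, GradFunc> &table)
{
    std::unordered_map<int, GradFunc>::const_iterator it = table.find(operID);
    if (it != table.end())
        it->second(node, isEfficient);
    // an unknown operID falls through here; the real code calls ShowNTErrors("TODO!")
}

An explicit if/else chain keeps the control flow obvious and matches the rest of the codebase; a table mainly pays off once handlers are registered dynamically.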
@@ -34,7 +34,7 @@ class XShapeGrad
 public:
     /* compute dE/dx of a node */
     static
-    void MakeGrad(XTensor * node, bool isEfficent);
+    void MakeGrad(XTensor * node, bool isEfficient);
     /* indicates whether the node is for a shaping operation */
     static
......
@@ -293,7 +293,7 @@ void _LogSoftmaxBackward(XTensor * gold, XTensor * y, XTensor * x,
                          LOSS_FUNCTION_NAME lossName)
 {
     CheckNTErrors((!dedx->isSparse), "The gradient matrix must be dense!");
-    CheckNTErrors((gold != NULL), "The gold standard cannot be empty!");
+    CheckNTErrors((gold != NULL || lossName == NOLOSS), "The gold standard cannot be empty!");
     if(leadDim < 0)
         leadDim = y->order - 1;
......
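The relaxed condition reflects how _LogSoftmaxBackward is used: with lossName == NOLOSS the caller supplies dE/dy directly and the gold-standard tensor is never read, so gold may be NULL in that case only. A minimal sketch of the precondition follows; all type names and the non-NOLOSS enum value are illustrative, not the library's definitions.

#include <cassert>
#include <cstddef>

enum LossFunctionName { NOLOSS, CROSSENTROPY };  // illustrative subset

struct Tensor { };  // stand-in for XTensor

// gold is only dereferenced when a loss must be computed from it,
// so it is required exactly when lossName != NOLOSS.
void CheckBackwardInputs(const Tensor *gold, LossFunctionName lossName)
{
    assert(gold != NULL || lossName == NOLOSS);
}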