Commit 6a4741bc by xiaotong

clean the code

parent d992a0e9
......@@ -30,7 +30,7 @@ namespace nts { // namespace nts(NiuTrans.Tensor)
void _Gather(const XTensor * s, XTensor * t, int dim, int * srcIndex, int indexSize);
/* gather selected sub-tensors (return a XTensor structure)
- make a new tensor to keep the result and return it */
+ make a new tensor to keep the result and return it */
XTensor Gather(const XTensor &s, int dim, int * srcIndex, int indexSize);
} // namespace nts(NiuTrans.Tensor)
......
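For reference, gather picks the sub-tensors named by srcIndex along dimension dim and packs them into a new tensor. Below is a minimal sketch of the dim-0 case on a flat row-major buffer; GatherRows and its parameters are illustrative stand-ins, not library code.

#include <vector>

/* Sketch of a dim-0 gather: copy the rows listed in srcIndex into a new buffer.
   Illustration only; the real _Gather works on XTensor objects and any dimension. */
std::vector<float> GatherRows(const std::vector<float> & src, int rowSize,
                              const int * srcIndex, int indexSize)
{
    std::vector<float> dst(indexSize * rowSize);
    for (int i = 0; i < indexSize; i++)            /* one selected row at a time */
        for (int j = 0; j < rowSize; j++)
            dst[i * rowSize + j] = src[srcIndex[i] * rowSize + j];
    return dst;
}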
......@@ -62,7 +62,6 @@ void _Spread(XTensor * source, XTensor * collection, int dim,
int * srcIndex, int indexSize, int * collIndex)
{
int order = source->order;
- int size = source->GetDim(dim);
CheckNTErrors(source->dataType == DEFAULT_DTYPE, "TODO!");
CheckNTErrors(dim >= 0 && dim < order, "Illegal dimension!");
......@@ -150,7 +149,6 @@ void _SpreadForGather(XTensor * source, XTensor * collection, int dim,
int * srcIndex, int indexSize, int * collIndex)
{
int order = source->order;
- int size = source->GetDim(dim);
CheckNTErrors(source->dataType == DEFAULT_DTYPE, "TODO!");
CheckNTErrors(dim >= 0 && dim < order, "Illegal dimension!");
......
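_Spread is the companion operation: it pushes the sub-tensors of collection (selected by collIndex) back into source at the positions given by srcIndex, which is how _SpreadForGather routes gradients in the backward pass. A minimal dim-0 sketch, assuming the values are accumulated into source (the accumulation and the name SpreadRows are assumptions, not library code):

#include <vector>

/* Sketch of a dim-0 spread: add each selected row of `collection` back into
   `source` at the matching index. Accumulation (+=) is assumed here. */
void SpreadRows(std::vector<float> & source, int rowSize,
                const std::vector<float> & collection,
                const int * srcIndex, int indexSize, const int * collIndex)
{
    for (int i = 0; i < indexSize; i++)
        for (int j = 0; j < rowSize; j++)
            source[srcIndex[i] * rowSize + j] += collection[collIndex[i] * rowSize + j];
}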
......@@ -61,42 +61,39 @@ void _CrossEntropy(const XTensor * output, const XTensor * gold,
CheckNTErrors(loss->order == output->order - 1, "Wrong loss dimension!");
CheckNTErrors(gold->dataType == DEFAULT_DTYPE && output->dataType == DEFAULT_DTYPE, "TODO!");
- XTensor * logInter = NewTensorBuf(output, output->devID, output->mem);
- XTensor * mulInter = NewTensorBuf(output, output->devID, output->mem);
- XTensor * negInter = NewTensorBuf(output, output->devID, output->mem);
+ XTensor * logBuf = NewTensorBuf(output, output->devID, output->mem);
+ XTensor * mulBuf = NewTensorBuf(output, output->devID, output->mem);
+ XTensor * negBuf = NewTensorBuf(output, output->devID, output->mem);
/* l = log(output) */
_Log(output, logBuf);
if(weight != NULL){
XTensor * weightBuf = NewTensorBuf(output, output->devID, output->mem);
- /* multiply gold and weight by broadcast wg = mulDim(g * w) */
+ /* multiply gold with weight by broadcast wg = mulDim(g * w) */
_MultiplyDim(gold, weight, weightBuf, n, 0);
- /* multiply weighted gold and log(output) wgl = mul(wg, l) */
+ /* multiply weighted gold with log(output) wgl = mul(wg, l) */
_Multiply(weightBuf, logBuf, mulBuf, 0);
DelTensorBuf(weightBuf);
}
else{
- /* multiply gold and log(output) gl = mul(g, l) */
+ /* multiply gold with log(output) gl = mul(g, l) */
_Multiply(gold, logBuf, mulBuf, 0);
}
- /* negate multiply result n = negate(mul) */
+ /* negate result n = negate(mul) */
_NegateMe(mulBuf);
_ReduceSum(mulBuf, loss, n);
- DelTensorBuf(negInter);
- DelTensorBuf(mulInter);
- DelTensorBuf(logInter);
+ DelTensorBuf(mulBuf);
+ DelTensorBuf(logBuf);
}
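The sequence above implements loss = sum_i (-gold_i * weight_i * log(output_i)) step by step: take the log, broadcast-multiply the gold distribution by the class weights, multiply with the log, negate, and reduce-sum over the class dimension. The same steps on plain arrays for one sample with three classes (illustration only, not library code):

#include <cmath>
#include <cstdio>

int main()
{
    const int n = 3;
    float output[n] = {0.7f, 0.2f, 0.1f};   /* predicted distribution  */
    float gold[n]   = {1.0f, 0.0f, 0.0f};   /* one-hot target          */
    float weight[n] = {1.0f, 1.0f, 1.0f};   /* optional per-class weights */

    float loss = 0.0f;
    for (int i = 0; i < n; i++) {
        float l   = std::log(output[i]);     /* l   = log(output)       */
        float wg  = gold[i] * weight[i];     /* wg  = g * w (broadcast)  */
        float wgl = wg * l;                  /* wgl = mul(wg, l)         */
        loss += -wgl;                        /* negate and reduce-sum    */
    }
    std::printf("loss = %f\n", loss);        /* -log(0.7), about 0.357   */
    return 0;
}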
/*
- compute the cross entropy loss (implementation manually)
+ compute the cross entropy loss (faster implementation with optimized code)
loss = sum_{i} (-gold_i * log(output_i))
where gold and output are distributions
......@@ -108,7 +105,7 @@ where gold and output are distributions
>> padding - specify a target value that is ignored and does not contribute to the loss computation
>> leadingDim - the leading dimension for the output
*/
- void _CrossEntropyManual(const XTensor * output, const XTensor * gold,
+ void _CrossEntropyFast(const XTensor * output, const XTensor * gold,
XTensor * loss, const XTensor * weight,
const XTensor * padding, int leadingDim)
{
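The padding argument lets target positions that carry no real label be excluded from the loss. As a hedged sketch of that idea, assuming padding acts as a 0/1 mask over positions (the mask interpretation and MaskedLossSketch are illustrative assumptions, not the library's implementation):

/* Sketch: a padded position (mask value 0) contributes nothing to the loss. */
float MaskedLossSketch(const float * perItemLoss, const float * paddingMask, int n)
{
    float sum = 0.0f;
    for (int i = 0; i < n; i++)
        sum += perItemLoss[i] * paddingMask[i];   /* 0 -> ignored, 1 -> counted */
    return sum;
}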
......@@ -263,21 +260,22 @@ DTYPE _CrossEntropy(const XTensor * output, const XTensor * gold,
XTensor * logBuf = NewTensorBuf(output, output->devID, output->mem);
XTensor * mulBuf = NewTensorBuf(output, output->devID, output->mem);
XTensor * negBuf = NewTensorBuf(output, output->devID, output->mem);
/* l = log(output) */
_Log(output, logBuf);
if(weight != NULL){
XTensor * weightBuf = NewTensorBuf(output, output->devID, output->mem);
- /* multiply gold and weight by broadcast wg = mulDim(g * w) */
+ /* multiply gold with weight by broadcast wg = mulDim(g * w) */
_MultiplyDim(gold, weight, weightBuf, n, 0);
- /* multiply weighted gold and log(output) wgl = mul(wg, l) */
+ /* multiply weighted gold with log(output) wgl = mul(wg, l) */
_Multiply(weightBuf, logBuf, mulBuf, 0);
DelTensorBuf(weightBuf);
}
else{
- /* multiply gold and log(output) gl = mul(g, l) */
+ /* multiply gold with log(output) gl = mul(g, l) */
_Multiply(gold, logBuf, mulBuf, 0);
}
......@@ -291,7 +289,6 @@ DTYPE _CrossEntropy(const XTensor * output, const XTensor * gold,
/* reduce sum all classes */
_ReduceSum(mulBuf, lossInter, n);
- DelTensorBuf(negBuf);
DelTensorBuf(mulBuf);
DelTensorBuf(logBuf);
......@@ -334,7 +331,7 @@ DTYPE _CrossEntropy(const XTensor * output, const XTensor * gold,
}
/*
- compute the cross entropy loss (implementation manually)
+ compute the cross entropy loss (faster implementation with optimized code)
loss = sum_{i} (-gold_i * log(output_i))
where gold and output are distributions
......@@ -347,7 +344,7 @@ where gold and output are distributions
>> leadingDim - the leading dimension for the output
<< return - the cross entropy loss that is a scalar
*/
- DTYPE _CrossEntropyManual(const XTensor * output, const XTensor * gold,
+ DTYPE _CrossEntropyFast(const XTensor * output, const XTensor * gold,
LOSS_COMPUTE_WAY reduceWay, const XTensor * weight,
const XTensor * padding, int leadingDim)
{
......@@ -459,7 +456,7 @@ DTYPE _CrossEntropyManual(const XTensor * output, const XTensor * gold,
}
/*
- backward computation for cross entropy function (tensor version)
+ backward computation for cross entropy function
loss = sum_{i} (-t_i * log(y_i))
dE/dy_i = -t_i / y_i
......
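The gradient quoted above follows directly from the loss: with loss = sum_i (-t_i * log(y_i)) and d(log y_i)/dy_i = 1/y_i, each output element receives dE/dy_i = -t_i / y_i. A minimal element-wise sketch of that backward step (illustrative names, not library code):

/* Sketch of the element-wise cross entropy gradient dE/dy_i = -t_i / y_i. */
void CrossEntropyBackwardSketch(const float * y, const float * t, float * dEdy, int n)
{
    for (int i = 0; i < n; i++)
        dEdy[i] = -t[i] / y[i];
}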
......@@ -111,7 +111,7 @@ where gold and output are distributions
>> padding - specify a target value that is ignored and does not contribute to the loss computation
>> leadingDim - the leading dimension for the output
*/
- void _CudaCrossEntropyManual(const XTensor * output, const XTensor * gold,
+ void _CudaCrossEntropyFast(const XTensor * output, const XTensor * gold,
XTensor * loss, const XTensor * weight,
const XTensor * padding, int leadingDim)
{
......@@ -201,7 +201,7 @@ where gold and output are distributions
>> leadingDim - the leading dimension for the output
<< return - the cross entropy loss that is a scalar
*/
- DTYPE _CudaCrossEntropyManual(const XTensor * output, const XTensor * gold,
+ DTYPE _CudaCrossEntropyFast(const XTensor * output, const XTensor * gold,
LOSS_COMPUTE_WAY reduceWay, const XTensor * weight,
const XTensor * padding, int leadingDim)
{
......
......@@ -27,13 +27,13 @@
namespace nts{ // namespace nts(NiuTrans.Tensor)
- /* compute the cross entropy loss (tensor version) */
- void _CudaCrossEntropyManual(const XTensor * output, const XTensor * gold,
+ /* compute the cross entropy loss */
+ void _CudaCrossEntropyFast(const XTensor * output, const XTensor * gold,
XTensor * loss, const XTensor * weight = NULL,
const XTensor * padding = NULL, int leadingDim = -1);
- /* compute the cross entropy loss (scalar version) */
- DTYPE _CudaCrossEntropyManual(const XTensor * output, const XTensor * gold,
+ /* compute the cross entropy loss */
+ DTYPE _CudaCrossEntropyFast(const XTensor * output, const XTensor * gold,
LOSS_COMPUTE_WAY reduceWay, const XTensor * weight = NULL,
const XTensor * padding = NULL, int leadingDim = -1);
......
......@@ -31,23 +31,23 @@ REDUCE_SUM,
REDUCE_MEAN
};
- /* compute the cross entropy loss (tensor version) */
+ /* compute the cross entropy loss */
void _CrossEntropy(const XTensor * output, const XTensor * gold,
XTensor * loss, const XTensor * weight = NULL,
const XTensor * padding = NULL, int leadingDim = -1);
- /* compute the cross entropy loss (tensor version) */
- void _CrossEntropyManual(const XTensor * output, const XTensor * gold,
+ /* compute the cross entropy loss */
+ void _CrossEntropyFast(const XTensor * output, const XTensor * gold,
XTensor * loss, const XTensor * weight = NULL,
const XTensor * padding = NULL, int leadingDim = -1);
- /* compute the cross entropy loss (scalar version) */
+ /* compute the cross entropy loss (return the loss) */
DTYPE _CrossEntropy(const XTensor * output, const XTensor * gold,
LOSS_COMPUTE_WAY reduceWay, const XTensor * weight = NULL,
const XTensor * padding = NULL, int leadingDim = -1);
- /* compute the cross entropy loss (scalar version) */
- DTYPE _CrossEntropyManual(const XTensor * output, const XTensor * gold,
+ /* compute the cross entropy loss (return the loss) */
+ DTYPE _CrossEntropyFast(const XTensor * output, const XTensor * gold,
LOSS_COMPUTE_WAY reduceWay = REDUCE_MEAN, const XTensor * weight = NULL,
const XTensor * padding = NULL, int leadingDim = -1);
......
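With the declarations above, calling code switches from the *Manual names to the *Fast names; only the identifiers change, the signatures stay the same. A minimal usage sketch, assuming the three tensors are shaped and filled elsewhere and that the nts namespace and this header are in scope:

/* Sketch: the two renamed entry points as declared in this header. */
void CrossEntropyUsageSketch(XTensor & output, XTensor & gold, XTensor & loss)
{
    /* tensor version: per-sample losses are written into `loss` */
    _CrossEntropyFast(&output, &gold, &loss);

    /* scalar version: reduce over all samples and return one value */
    DTYPE mean = _CrossEntropyFast(&output, &gold, REDUCE_MEAN);
    (void)mean;
}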