/* NiuTrans.Tensor - an open-source tensor library
* Copyright (C) 2017, Natural Language Processing Lab, Northeastern University.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*   http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

/*
* $Created by: XIAO Tong (email: xiaotong@mail.neu.edu.cn) 2018-04-26
*/

#ifndef __LOGSOFTMAX_CUH__
#define __LOGSOFTMAX_CUH__

#include "../XTensor.h"
#include "Loss.h"

namespace nts { // namespace nts(NiuTrans.Tensor)

#ifdef USE_CUDA

/* log scale softmax y = log(e^x / \sum_{i} e^{x_i}) (Cuda version) */
void _CudaLogSoftmax(const XTensor * input, XTensor * output, int leadDim);
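/*
   A minimal usage sketch (an assumption about a typical call site, not code
   taken from this library): NewTensor2D and DelTensor are assumed to be
   available from the public XTensor API included via ../XTensor.h.

       XTensor * x = NewTensor2D(64, 32000, X_FLOAT, devID); // e.g. batch x vocab
       XTensor * y = NewTensor2D(64, 32000, X_FLOAT, devID);
       _CudaLogSoftmax(x, y, 1);  // normalize along dimension 1 (the vocabulary)
       DelTensor(y);
       DelTensor(x);
*/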

/* log scale softmax y = log(e^x / \sum_{i} e^{x_i}), given precomputed sum and max along the leading dimension (Cuda version) */
void _CudaLogSoftmaxSumMax(XTensor * x, XTensor * y, int leadDim, XTensor * sum, XTensor * max);
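/*
   Numerically stable form this routine can rely on (a sketch of the standard
   derivation, assuming sum and max are precomputed along leadDim):

       max_j = \max_i x_{i,j}
       sum_j = \sum_i e^{x_{i,j} - max_j}
       y_{i,j} = x_{i,j} - max_j - log(sum_j)

   This equals log(e^{x_{i,j}} / \sum_i e^{x_{i,j}}) while keeping every
   exponent non-positive, so e^x cannot overflow for large inputs.
*/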

/* de/dx: backward of log softmax under a given loss function (Cuda version) */
void _CudaLogSoftmaxBackward(XTensor * gold, XTensor * y, XTensor * x,
                            XTensor * dedy, XTensor * dedx, 
                            XTensor * padding, int leadDim, 
                            LOSS_FUNCTION_NAME lossName);
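
/*
   For the cross-entropy loss with one-hot gold labels, the standard closed
   form of this gradient (a textbook result, not necessarily the exact kernel
   path used here) is

       de/dx_i = softmax(x)_i - gold_i = e^{y_i} - gold_i

   since y = log softmax(x) implies softmax(x) = e^y. When a padding tensor
   is given, it presumably masks out the gradient at padded positions.
*/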

#endif // USE_CUDA

} // namespace nts(NiuTrans.Tensor)

#endif // __LOGSOFTMAX_CUH__