/* NiuTrans.Tensor - an open-source tensor library
 * Copyright (C) 2018, Natural Language Processing Lab, Northeastern University.
 * All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * $Created by: XIAO Tong (xiaotong@mail.neu.edu.cn) 2018-07-31
 */

#ifndef __T2TDECODER_H__
#define __T2TDECODER_H__

#include "T2TEncoder.h"

namespace transformer
{
    
#define DECODING_NAME "decoding"
#define DECODING_INPUT_NAME "decoding_input"

class AttDecoder
{
public:

    /* device id */
    int devID;

    /* layer number */
    int nlayer;

    /* hidden layer size of the FNN layer */
    int hSize;

    /* embedding size */
    int eSize;

    /* vocabulary size */
    int vSize;

    /* dropout probability */
    DTYPE dropoutP;

    /* Some positions can be ignored in attention. This is useful in language
     * modeling, where the first position needs a special design for the
     * attention model. */
    int ignored;

    /* embedding of word at each position */
    T2TEmbedder embedder;

    /* FNN model of each layer */
    T2TFNN * fnns;

    /* self-attention model of each layer */
    T2TAttention * selfAtt;

    /* layer normalization for self-attention */
    T2TLN * selfAttLayerNorms;

    /* layer normalization for decoder */
    T2TLN * decoderLayerNorm;

    /* input tensor of the decoder */
    XTensor * input;

    /* output tensor of the decoder */
    XTensor * output;

    /* encoder-decoder attention model of each layer */
    T2TAttention * enDeAtt;

    /* layer normalization for encoder-decoder attention */
    T2TLN * enDeAttLayerNorms;

    /* cache for the self-attention of each layer */
    Cache* selfAttCache;

    /* cache for the encoder-decoder attention of each layer */
    Cache* enDeAttCache;

public:
    /* constructor */
    AttDecoder();

    /* destructor */
    ~AttDecoder();

    /* initialize the model */
    void InitModel(int argc, char ** argv, 
                   bool myIsMasked, int myIgnored, 
                   int myDevID = -1);

    /* make the decoding network */
    XTensor Make(XTensor &inputDec, XTensor &outputEnc, XTensor *mask, XTensor &maskEncDec, bool isTraining);
};
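
/* A minimal usage sketch (illustrative only, not part of this header):
 * the decoder is initialized from command-line options, then Make() builds
 * the decoding network for one pass. The tensors inputDec, outputEnc,
 * maskDec, and maskEncDec are assumed to be prepared by the caller, e.g.,
 * by the trainer or the searcher.
 *
 *     AttDecoder decoder;
 *     decoder.InitModel(argc, argv, true, 0);
 *     XTensor output = decoder.Make(inputDec, outputEnc, &maskDec, maskEncDec, true);
 */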

}

#endif