Commit 007176c8 by 曹润柘

Merge branch 'caorunzhe' into 'master'

Caorunzhe

See merge request !384
parents 9a1eeda6 3557eb23
......@@ -1257,7 +1257,7 @@ L(\mathbi{Y},\widehat{\mathbi{Y}}) = \sum_{j=1}^n L_{\textrm{ce}}(\mathbi{y}_j,\
\vspace{0.5em}
\item The use of attention mechanisms is one of the key factors behind the recent success of machine translation, and of natural language processing as a whole\upcite{bahdanau2014neural,DBLP:journals/corr/LuongPM15}. Early on, researchers tried to unify attention with the word alignment of statistical machine translation\upcite{WangNeural,He2016ImprovedNM,li-etal-2019-word}. More recently, a large body of work has improved the attention mechanism itself, for example by building translation models entirely on self-attention\upcite{vaswani2017attention}, and improving attention models has become one of the hot topics in natural language processing. {\chapterfifteen} discusses the different attention models used in machine translation in more detail; a minimal sketch of the underlying computation is given right after this list.
\vspace{0.5em}
\item In general, the computation performed by neural machine translation involves no human intervention, and its translation process cannot be directly explained with human knowledge. An interesting direction is therefore to inject prior knowledge into neural machine translation so that it behaves more ``like'' a human. For example, syntax trees can be used to introduce linguistic knowledge\upcite{Yang2017TowardsBH,Wang2019TreeTI}, and syntax-based neural machine translation involves a large amount of tree-structured neural network modeling\upcite{DBLP:journals/corr/abs-1809-01854,DBLP:journals/corr/abs-1808-09374}. User-defined dictionaries or translation memories can also be incorporated into the translation process\upcite{DBLP:journals/corr/ZhangZ16c,zhang-etal-2017-prior,duan-etal-2020-bilingual,cao-xiong-2018-encoding}, so that user constraints are reflected directly in the translation output. Many other kinds of prior knowledge, such as word alignments\upcite{li-etal-2019-word,DBLP:conf/emnlp/MiWI16,DBLP:conf/coling/LiuUFS16} and document-level information\upcite{Werlen2018DocumentLevelNM,DBLP:journals/corr/abs-1805-10163,DBLP:conf/acl/LiLWJXZLL20}, can likewise be exploited in neural machine translation.
\end{itemize}
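% A minimal sketch of the attention computation referred to in the first item above, added here for
% reference only. It reuses the \mathbi notation from this source; the alignment function $a(\cdot)$,
% the decoder state $\mathbi{s}_{j-1}$, the encoder states $\mathbi{h}_i$ and the key dimension $d_k$
% follow the common convention and are not tied to any particular equation of the book.
\begin{eqnarray}
\mathbi{C}_j &=& \sum_{i} \alpha_{i,j}\,\mathbi{h}_i \,,\qquad
\alpha_{i,j} \;=\; \frac{\exp\big(a(\mathbi{s}_{j-1},\mathbi{h}_i)\big)}{\sum_{i'}\exp\big(a(\mathbi{s}_{j-1},\mathbi{h}_{i'})\big)} \nonumber \\
\textrm{Attention}(\mathbi{Q},\mathbi{K},\mathbi{V}) &=& \textrm{Softmax}\Big(\frac{\mathbi{Q}\mathbi{K}^{\textrm{T}}}{\sqrt{d_k}}\Big)\mathbi{V} \nonumber
\end{eqnarray}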
%\newlength{\bcc}
\setlength{\bcc}{0.4cm}
\begin{tikzpicture}
\begin{scope}
%\tikzstyle{every node}=[scale=0.8]
\tikzstyle{line} = [dash pattern=on 2pt off 1pt,line width=0.5pt]
\tikzstyle{cir} = [thin,fill=blue!8,draw,circle,minimum size =0.5em,drop shadow={shadow xshift=0.15em, shadow yshift=-0.1em}]
\tikzstyle{word} = [inner sep=0pt, font=\scriptsize,minimum height=\bcc]
\draw[fill=red!8,line width=0.2pt] (0cm,0cm+1*\bcc) rectangle (0cm+4*\bcc,0cm+7*\bcc);
\draw[fill=cyan!14,line width=0.2pt] (0cm,0cm) rectangle (0cm+4*\bcc,0cm+1*\bcc);
\draw[fill=cyan!14,line width=0.2pt] (0cm,0cm+7*\bcc) rectangle (0cm+4*\bcc,0cm+8*\bcc);
\draw[step=\bcc] (0cm,0cm) grid (0cm+4*\bcc,0cm+8*\bcc);
%\draw[line width=0.7pt] (0cm,0cm) rectangle (0cm+4*\bcc,0cm+8*\bcc);
\draw[red!50,line width=1.8pt] (0cm,0cm+5*\bcc) rectangle (0cm+4*\bcc,0cm+8*\bcc);
\draw[ugreen!50,line width=1.8pt] (0cm,0cm+1*\bcc) rectangle (0cm+4*\bcc,0cm+4*\bcc);
\draw[fill=blue!8,xshift=5.0cm,yshift=1.0cm,line width=0.2pt] (0cm,0cm) rectangle (0cm+1*\bcc,0cm+6*\bcc);
\draw[step=\bcc,xshift=5.0cm,yshift=1.0cm] (0cm,0cm) grid (0cm+1*\bcc,0cm+6*\bcc);
%\draw[line width=0.7pt,xshift=5.0cm,yshift=1.0cm] (0cm,0cm) rectangle (0cm+1*\bcc,0cm+6*\bcc);
\draw[fill=blue!8,xshift=5.2cm,yshift=0.8cm,line width=0.2pt] (0cm,0cm) rectangle (0cm+1*\bcc,0cm+6*\bcc);
\draw[step=\bcc,xshift=5.2cm,yshift=0.8cm] (0cm,0cm) grid (0cm+1*\bcc,0cm+6*\bcc);
%\draw[line width=0.7pt,xshift=5.2cm,yshift=0.8cm] (0cm,0cm) rectangle (0cm+1*\bcc,0cm+6*\bcc);
\draw[ugreen!50,line width=2pt,xshift=5.2cm,yshift=0.8cm] (0cm,0cm+1*\bcc) rectangle (0cm+1*\bcc,0cm+2*\bcc);
\draw[fill=blue!8,xshift=5.4cm,yshift=0.6cm,line width=0.2pt] (0cm,0cm) rectangle (0cm+1*\bcc,0cm+6*\bcc);
\draw[step=\bcc,xshift=5.4cm,yshift=0.6cm] (0cm,0cm) grid (0cm+1*\bcc,0cm+6*\bcc);
%\draw[line width=0.7pt,xshift=5.4cm,yshift=0.6cm] (0cm,0cm) rectangle (0cm+1*\bcc,0cm+6*\bcc);
\draw[fill=blue!8,xshift=5.6cm,yshift=0.4cm,line width=0.2pt] (0cm,0cm) rectangle (0cm+1*\bcc,0cm+6*\bcc);
\draw[step=\bcc,xshift=5.6cm,yshift=0.4cm] (0cm,0cm) grid (0cm+1*\bcc,0cm+6*\bcc);
%\draw[line width=0.7pt,xshift=5.6cm,yshift=0.4cm] (0cm,0cm) rectangle (0cm+1*\bcc,0cm+6*\bcc);
\draw[red!50,line width=2pt,xshift=5.6cm,yshift=0.4cm] (0cm,0cm+5*\bcc) rectangle (0cm+1*\bcc,0cm+6*\bcc);
\draw[red!50,line width=0.5pt] (0cm+4*\bcc,0cm+8*\bcc) -- ([xshift=5.6cm,yshift=0.4cm]0cm,0cm+6*\bcc);
\draw[red!50,line width=0.5pt] (0cm+4*\bcc,0cm+5*\bcc) -- ([xshift=5.6cm,yshift=0.4cm]0cm,0cm+5*\bcc);
\draw[ugreen!50,line width=0.5pt] (0cm+4*\bcc,0cm+4*\bcc) -- ([xshift=5.2cm,yshift=0.8cm]0cm,0cm+2*\bcc);
\draw[ugreen!50,line width=0.5pt] (0cm+4*\bcc,0cm+1*\bcc) -- ([xshift=5.2cm,yshift=0.8cm]0cm,0cm+1*\bcc);
\node[word] (w1) at (-0.5cm, 3.0cm) {$<$p$>$};
\node[word] (w2) at ([yshift=-\bcc]w1) {今天};
\node[word] (w3) at ([yshift=-\bcc]w2) {};
\node[word] (w4) at ([yshift=-\bcc]w3) {};
\node[word] (w5) at ([yshift=-\bcc]w4) {};
\node[word] (w6) at ([yshift=-\bcc]w5) {日子};
\node[word] (w7) at ([yshift=-\bcc]w6) {};
\node[word] (w8) at ([yshift=-\bcc]w7) {$<$p$>$};
\node[inner xsep=2pt,inner ysep=0pt,font=\footnotesize] (c1) at (0.8cm, 3.5cm) {$O$};
\draw[-latex] (c1.west) -- ([xshift=-0.4cm]c1.west);
......@@ -59,9 +59,9 @@
\draw[-latex] (c2.east) -- ([xshift=0.2cm, yshift=-0.2cm]c2.east);
%%%%%%%%%%%%%%%%%%%%%
\node[word] (sub) at ([xshift=9.5*\bcc,yshift=3.5*\bcc]w1) {$O$\ \ : number of input channels};
\node[word] (sub2) at ([yshift=-0.5*\bcc]sub.south) {$N$\ \ : number of convolution kernels};
\node[word] (sub3) at ([xshift=-1.35em,yshift=-0.5*\bcc]sub2.south) {$<$p$>$: padding};
\end{scope}
......
%\newlength{\bcc}
\setlength{\bcc}{0.4cm}
\begin{tikzpicture}
\begin{scope}
%\tikzstyle{every node}=[scale=0.8]
\tikzstyle{line} = [dash pattern=on 2pt off 1pt,line width=0.6pt]
\tikzstyle{cir} = [thin,fill=blue!8,draw,circle,minimum size =0.5em,drop shadow={shadow xshift=0.15em, shadow yshift=-0.1em}]
\tikzstyle{word} = [inner sep=0pt, font=\footnotesize,minimum height=\bcc]
\draw[fill=blue!8,xshift=0.3cm,yshift=0.5cm,line width=0.6pt] (0cm,0cm) rectangle (0cm+6*\bcc,0cm+9*\bcc);
\draw[ugreen!60,step=\bcc,xshift=0.3cm,yshift=0.5cm,gray] (0cm,0cm) grid (0cm+6*\bcc,0cm+9*\bcc);
%\draw[line width=0.7pt,xshift=0.3cm,yshift=0.5cm] (0cm,0cm) rectangle (0cm+6*\bcc,0cm+9*\bcc);
\draw[red!60,line width=2pt,xshift=0.3cm,yshift=0.5cm] (0cm,0cm+2*\bcc) rectangle (0cm+6*\bcc,0cm+4*\bcc);
\draw[thick,fill=blue!8,line width=0.6pt] (0cm,0cm) rectangle (0cm+6*\bcc,0cm+9*\bcc);
\draw[step=\bcc,gray] (0cm,0cm) grid (0cm+6*\bcc,0cm+9*\bcc);
%\draw[line width=0.7pt] (0cm,0cm) rectangle (0cm+6*\bcc,0cm+9*\bcc);
\draw[red!60,line width=2pt] (0cm,0cm) rectangle (0cm+6*\bcc,0cm+2*\bcc);
\draw[ugreen!60,line width=2pt] (0cm,0cm+3*\bcc) rectangle (0cm+6*\bcc,0cm+6*\bcc);
\draw[red!60,line width=2pt] (0cm,0cm+7*\bcc) rectangle (0cm+6*\bcc,0cm+9*\bcc);
\draw[fill=blue!8,xshift=5.0cm,yshift=1.3cm,line width=0.6pt] (0cm,0cm) rectangle (0cm+1*\bcc,0cm+6*\bcc);
\draw[step=\bcc,gray,xshift=5.0cm,yshift=1.3cm] (0cm,0cm) grid (0cm+1*\bcc,0cm+6*\bcc);
%\draw[xshift=5.0cm,yshift=1.3cm,line width=0.7pt] (0cm,0cm) rectangle (0cm+1*\bcc,0cm+6*\bcc);
\draw[ugreen!60,line width=2pt,xshift=5.0cm,yshift=1.3cm] (0cm,0cm+2*\bcc) rectangle (0cm+1*\bcc,0cm+3*\bcc);
\draw [gray,fill=blue!8,line width=0.6pt](8cm,2.6cm) -- (8.4cm, 2.6cm) -- (9cm,1cm) -- (8.6cm, 1cm) -- (8cm,2.6cm);
\draw [gray](8.15cm,2.2cm) -- (8.55cm,2.2cm);
......@@ -35,48 +35,48 @@
\draw [gray,fill=blue!8,line width=0.6pt](11cm,2.2cm) -- (11.4cm, 2.2cm) -- (11.7cm,1.4cm) -- (11.3cm, 1.4cm) -- (11cm,2.2cm);
\draw [gray](11.15cm,1.8cm) -- (11.55cm,1.8cm);
\draw[ugreen!60,line] ([xshift=5.0cm,yshift=1.3cm]0cm+1*\bcc,0cm+6*\bcc) -- (8cm,2.6cm);
\draw[ugreen!60,line] ([xshift=5.0cm,yshift=1.3cm]0cm+1*\bcc,0cm) -- (8.15cm,2.2cm);
\draw[fill=blue!8,xshift=5.2cm,yshift=1.0cm,line width=0.6pt] (0cm,0cm) rectangle (0cm+1*\bcc,0cm+6*\bcc);
\draw[step=\bcc,gray,xshift=5.2cm,yshift=1.0cm] (0cm,0cm) grid (0cm+1*\bcc,0cm+6*\bcc);
%\draw[line width=0.7pt,xshift=5.2cm,yshift=1.0cm] (0cm,0cm) rectangle (0cm+1*\bcc,0cm+6*\bcc);
\draw[fill=blue!8,xshift=5.4cm,yshift=0.3cm,line width=0.6pt] (0cm,0cm) rectangle (0cm+1*\bcc,0cm+7*\bcc);
\draw[step=\bcc,gray,xshift=5.4cm,yshift=0.3cm] (0cm,0cm) grid (0cm+1*\bcc,0cm+7*\bcc);
%\draw[line width=0.7pt,xshift=5.4cm,yshift=0.3cm] (0cm,0cm) rectangle (0cm+1*\bcc,0cm+7*\bcc);
\draw[fill=blue!8,xshift=5.6cm,yshift=0cm,line width=0.6pt] (0cm,0cm) rectangle (0cm+1*\bcc,0cm+7*\bcc);
\draw[step=\bcc,gray,xshift=5.6cm,yshift=0cm] (0cm,0cm) grid (0cm+1*\bcc,0cm+7*\bcc);
%\draw[line width=0.7pt,xshift=5.6cm,yshift=0cm] (0cm,0cm) rectangle (0cm+1*\bcc,0cm+7*\bcc);
\draw[red!60,line width=2pt,xshift=5.6cm,yshift=0cm] (0cm,0cm) rectangle (0cm+1*\bcc,0cm+1*\bcc);
\draw[red!60,line width=2pt,xshift=5.6cm,yshift=0cm] (0cm,0cm+2*\bcc) rectangle (0cm+1*\bcc,0cm+3*\bcc);
\draw[red!60,line width=2pt,xshift=5.6cm,yshift=0cm] (0cm,0cm+6*\bcc) rectangle (0cm+1*\bcc,0cm+7*\bcc);
\draw[line] (8.4cm, 2.6cm) -- (11cm,2.2cm);
\draw[line] (9cm,1cm) -- (11.3cm, 1.4cm);
\draw[red!60,line] ([xshift=5.6cm,yshift=0cm]0cm+1*\bcc,0cm+7*\bcc) -- (8.45cm,1.4cm);
\draw[red!60,line] ([xshift=5.6cm,yshift=0cm]0cm+1*\bcc,0cm) -- (8.6cm, 1cm);
\draw[red!60,line] (0cm+6*\bcc,0cm+9*\bcc) -- ([xshift=5.6cm,yshift=0cm]0cm,0cm+7*\bcc);
\draw[red!60,line] (0cm+6*\bcc,0cm+7*\bcc) -- ([xshift=5.6cm,yshift=0cm]0cm,0cm+6*\bcc);
\draw[red!60,line] (0cm+6*\bcc,0cm+2*\bcc) -- ([xshift=5.6cm,yshift=0cm]0cm,0cm+1*\bcc);
\draw[red!60,line] (0cm+6*\bcc,0cm) -- ([xshift=5.6cm,yshift=0cm]0cm,0cm);
\draw[ugreen!60,line] (0cm+6*\bcc,0cm+6*\bcc) -- ([xshift=5.0cm,yshift=1.3cm]0cm,0cm+3*\bcc);
\draw[ugreen!60,line] (0cm+6*\bcc,0cm+3*\bcc) -- ([xshift=5.0cm,yshift=1.3cm]0cm,0cm+2*\bcc);
\draw[red!60,line] ([xshift=0.3cm,yshift=0.5cm]0cm+6*\bcc,0cm+4*\bcc) -- ([xshift=5.6cm,yshift=0cm]0cm,0cm+3*\bcc);
\draw[red!60,line] ([xshift=0.3cm,yshift=0.5cm]0cm+6*\bcc,0cm+2*\bcc) -- ([xshift=5.6cm,yshift=0cm]0cm,0cm+2*\bcc);
\node[word] (w1) at (-0.5cm, 3.4cm) {wait};
\node[word] (w2) at ([yshift=-\bcc]w1) {for};
\node[word] (w3) at ([yshift=-\bcc]w2) {the};
\node[word] (w4) at ([yshift=-\bcc]w3) {video};
\node[word] (w5) at ([yshift=-\bcc]w4) {and};
\node[word] (w6) at ([yshift=-\bcc]w5) {do};
\node[word] (w7) at ([yshift=-\bcc]w6) {n't};
\node[word] (w8) at ([yshift=-\bcc]w7) {rent};
\node[word] (w9) at ([yshift=-\bcc]w8) {it};
\node[draw,rectangle callout,callout relative pointer={(0.28,-0.6)}] at (-0.3cm,4.6cm) {\textrm{convolution kernel}};
\node[draw,rectangle callout,callout relative pointer={(0.1,-0.5)}] at (5cm,4.6cm) {\textrm{feature map}};
......
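% The two figures above illustrate 1-D convolution over a word sequence, with <p> marking padded
% positions, O input channels and N kernels (one feature map per kernel). As a generic reminder of
% the size bookkeeping they depict (a sketch only; m, K and p are illustrative symbols that do not
% appear in the figures, and a stride of 1 is assumed): a kernel spanning K positions applied to a
% sequence of length m with p padded positions at each end yields a feature map of length
\begin{eqnarray}
n_{\textrm{out}} &=& m + 2p - K + 1 \nonumber
\end{eqnarray}
% and N such kernels stack into an n_out-by-N output.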
......@@ -16,7 +16,7 @@
\renewcommand\figurename{}% change the figure caption label
\renewcommand\tablename{}% change the table caption label
\chapterimage{fig-NEU-2.jpg} % Chapter heading image
\newlength{\bcc}
%----------------------------------------------------------------------------------------
% CHAPTER 11
......
......@@ -587,7 +587,7 @@ Transformer Deep (48 layers) & 30.2 & 43.1 & 194$\times 10^
\begin{itemize}
\vspace{0.5em}
\item In the last couple of years, studies have found that attention can capture certain linguistic phenomena\upcite{DBLP:journals/corr/abs-1905-09418}. In Transformer's multi-head attention, for instance, different heads tend to capture different information: some heads are more sensitive to low-frequency words, some are better suited to word sense disambiguation, and some even capture syntactic information (a minimal sketch of the per-head computation is given after this excerpt). In addition, attention adds to model complexity, and as networks get deeper neural machine translation also contains considerable redundancy, so developing lightweight attention models is a direction of practical value\upcite{Xiao2019SharingAW,DBLP:journals/corr/abs-1805-00631,Lin2020WeightDT,DBLP:conf/iclr/WuLLLH20,Kitaev2020ReformerTE,DBLP:journals/corr/abs-2005-00743,dai-etal-2019-transformer,DBLP:journals/corr/abs-2004-05150,DBLP:conf/iclr/RaePJHL20}.
\vspace{0.5em}
\item Neural machine translation relies on relatively expensive GPU hardware, so model compression and acceleration are of great interest to system developers. From an engineering perspective, the computational load can be reduced, for example by computing with low-precision floating-point numbers\upcite{Ott2018ScalingNM} or integers\upcite{DBLP:journals/corr/abs-1906-00532,Lin2020TowardsF8}, or by introducing caching to speed up model inference\upcite{Vaswani2018Tensor2TensorFN}; the overall model size can also be reduced by pruning the parameter matrices\upcite{DBLP:journals/corr/SeeLM16}. Another approach is knowledge distillation\upcite{Hinton2015Distilling,kim-rush-2016-sequence}, in which a large model is used to train a small one, often yielding better results than training the small model alone\upcite{DBLP:journals/corr/ChenLCL17} (this objective is also sketched after this excerpt).
\vspace{0.5em}
......
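% Minimal sketches of two computations mentioned in the items above, for reference only: the
% per-head attention of Transformer's multi-head mechanism, and the sequence-level knowledge
% distillation objective of kim-rush-2016-sequence. The projection matrices \mathbi{W}_i^{Q},
% \mathbi{W}_i^{K}, \mathbi{W}_i^{V}, \mathbi{W}^{O}, the head count h and the teacher/student
% notation follow the common convention and are illustrative rather than the book's own equations.
\begin{eqnarray}
\textrm{head}_i &=& \textrm{Attention}(\mathbi{Q}\mathbi{W}_i^{Q},\,\mathbi{K}\mathbi{W}_i^{K},\,\mathbi{V}\mathbi{W}_i^{V}) \nonumber \\
\textrm{MultiHead}(\mathbi{Q},\mathbi{K},\mathbi{V}) &=& \textrm{Concat}(\textrm{head}_1,\dots,\textrm{head}_h)\,\mathbi{W}^{O} \nonumber \\
L_{\textrm{seq-KD}} &=& -\sum_{\mathbi{x}\in D}\log P_{\textrm{student}}(\hat{\mathbi{y}}\,|\,\mathbi{x})\,,\qquad
\hat{\mathbi{y}} \;\approx\; \mathop{\arg\max}_{\mathbi{y}} P_{\textrm{teacher}}(\mathbi{y}\,|\,\mathbi{x}) \nonumber
\end{eqnarray}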
......@@ -4337,6 +4337,43 @@ year = {2012}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%% chapter 10------------------------------------------------------
@inproceedings{DBLP:conf/acl/LiLWJXZLL20,
author = {Bei Li and
Hui Liu and
Ziyang Wang and
Yufan Jiang and
Tong Xiao and
Jingbo Zhu and
Tongran Liu and
Changliang Li},
title = {Does Multi-Encoder Help? {A} Case Study on Context-Aware Neural Machine
Translation},
pages = {3512--3518},
publisher = {Association for Computational Linguistics},
year = {2020}
}
@inproceedings{DBLP:conf/emnlp/MiWI16,
author = {Haitao Mi and
Zhiguo Wang and
Abe Ittycheriah},
title = {Supervised Attentions for Neural Machine Translation},
pages = {2283--2288},
publisher = {The Association for Computational Linguistics},
year = {2016}
}
@inproceedings{DBLP:conf/coling/LiuUFS16,
author = {Lemao Liu and
Masao Utiyama and
Andrew M. Finch and
Eiichiro Sumita},
title = {Neural Machine Translation with Supervised Attention},
pages = {3093--3102},
publisher = {The Association for Computational Linguistics},
year = {2016}
}
@inproceedings{devlin-etal-2014-fast,
author = {Jacob Devlin and
Rabih Zbib and
......@@ -4378,13 +4415,15 @@ year = {2012}
year = {1998},
}
@article{BENGIO1994Learning,
author = {Yoshua Bengio and
Patrice Y. Simard and
Paolo Frasconi},
title = {Learning long-term dependencies with gradient descent is difficult},
journal = {Institute of Electrical and Electronics Engineers},
volume = {5},
number = {2},
pages = {157--166},
year = {1994}
}
@inproceedings{NIPS2017_7181,
author = {Ashish Vaswani and
......@@ -4460,7 +4499,7 @@ pages ={157-166},
title = {Learning Deep Transformer Models for Machine Translation},
pages = {1810--1822},
publisher = {Association for Computational Linguistics},
year = {2019}
}
@article{Li2020NeuralMT,
author = {Yanyang Li and
......@@ -4860,21 +4899,24 @@ pages ={157-166},
year={2018}
}
@inproceedings{Lin2020TowardsF8,
author = {Ye Lin and
Yanyang Li and
Tengbo Liu and
Tong Xiao and
Tongran Liu and
Jingbo Zhu},
title = {Towards Fully 8-bit Integer Inference for the Transformer Model},
pages = {3759--3765},
publisher = {International Joint Conference on Artificial Intelligence},
year = {2020}
}
@inproceedings{kim-rush-2016-sequence,
title = "Sequence-Level Knowledge Distillation",
author = "Kim, Yoon and
Rush, Alexander M.",
publisher = "Proceedings of the 2016 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2016",
//address = "Austin, Texas",
//publisher = "Association for Computational Linguistics",
pages = "1317--1327",
author = {Yoon Kim and
Alexander M. Rush},
title = {Sequence-Level Knowledge Distillation},
pages = {1317--1327},
publisher = {The Association for Computational Linguistics},
year = {2016}
}
@article{Akaike1969autoregressive,
author = {Hirotugu Akaike},
......@@ -4914,16 +4956,14 @@ pages ={157-166},
year={2018}
}
@inproceedings{cho-etal-2014-properties,
title = "On the Properties of Neural Machine Translation: Encoder--Decoder Approaches",
author = {Cho, Kyunghyun and
van Merri{\"e}nboer, Bart and
Bahdanau, Dzmitry and
Bengio, Yoshua},
month = oct,
year = "2014",
address = "Doha, Qatar",
publisher = "Association for Computational Linguistics",
pages = "103--111",
author = {Kyunghyun Cho and
Bart van Merrienboer and
Dzmitry Bahdanau and
Yoshua Bengio},
title = {On the Properties of Neural Machine Translation: Encoder-Decoder Approaches},
pages = {103--111},
publisher = {Association for Computational Linguistics},
year = {2014}
}
@inproceedings{DBLP:conf/acl/JeanCMB15,
......@@ -4948,10 +4988,14 @@ pages ={157-166},
year = {2015}
}
@inproceedings{He2016ImprovedNM,
author = {Wei He and
Zhongjun He and
Hua Wu and
Haifeng Wang},
title = {Improved Neural Machine Translation with {SMT} Features},
pages = {151--157},
publisher = {Association for the Advancement of Artificial Intelligence},
year = {2016}
}
@inproceedings{zhang-etal-2017-prior,
title = {Prior Knowledge Integration for Neural Machine Translation using Posterior Regularization},
......@@ -4966,45 +5010,40 @@ pages ={157-166},
}
@inproceedings{duan-etal-2020-bilingual,
title = "Bilingual Dictionary Based Neural Machine Translation without Using Parallel Sentences",
author = "Duan, Xiangyu and
Ji, Baijun and
Jia, Hao and
Tan, Min and
Zhang, Min and
Chen, Boxing and
Luo, Weihua and
Zhang, Yue",
month = jul,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
pages = "1570--1579",
author = {Xiangyu Duan and
Baijun Ji and
Hao Jia and
Min Tan and
Min Zhang and
Boxing Chen and
Weihua Luo and
Yue Zhang},
title = {Bilingual Dictionary Based Neural Machine Translation without Using
Parallel Sentences},
pages = {1570--1579},
publisher = {Association for Computational Linguistics},
year = {2020}
}
@inproceedings{cao-xiong-2018-encoding,
title = "Encoding Gated Translation Memory into Neural Machine Translation",
author = "Cao, Qian and
Xiong, Deyi",
month = oct,
year = "2018",
address = "Brussels, Belgium",
publisher = "Association for Computational Linguistics",
pages = "3042--3047",
author = {Qian Cao and
Deyi Xiong},
title = {Encoding Gated Translation Memory into Neural Machine Translation},
pages = {3042--3047},
publisher = {Association for Computational Linguistics},
year = {2018}
}
@inproceedings{yang-etal-2016-hierarchical,
title = "Hierarchical Attention Networks for Document Classification",
author = "Yang, Zichao and
Yang, Diyi and
Dyer, Chris and
He, Xiaodong and
Smola, Alex and
Hovy, Eduard",
month = jun,
year = "2016",
address = "San Diego, California",
publisher = "Association for Computational Linguistics",
pages = "1480--1489",
author = {Zichao Yang and
Diyi Yang and
Chris Dyer and
Xiaodong He and
Alexander J. Smola and
Eduard H. Hovy},
title = {Hierarchical Attention Networks for Document Classification},
pages = {1480--1489},
publisher = {The Association for Computational Linguistics},
year = {2016}
}
%%%%% chapter 10------------------------------------------------------
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
......@@ -5014,9 +5053,6 @@ pages ={157-166},
@inproceedings{DBLP:conf/naacl/Johnson015,
author = {Rie Johnson and
Tong Zhang},
editor = {Rada Mihalcea and
Joyce Yue Chai and
Anoop Sarkar},
title = {Effective Use of Word Order for Text Categorization with Convolutional
Neural Networks},
pages = {103--112},
......@@ -5027,10 +5063,6 @@ pages ={157-166},
@inproceedings{DBLP:conf/naacl/NguyenG15,
author = {Thien Huu Nguyen and
Ralph Grishman},
editor = {Phil Blunsom and
Shay B. Cohen and
Paramveer S. Dhillon and
Percy Liang},
title = {Relation Extraction: Perspective from Convolutional Neural Networks},
pages = {39--48},
publisher = {The Association for Computational Linguistics},
......@@ -5411,6 +5443,51 @@ pages ={157-166},
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%% chapter 12------------------------------------------------------
@inproceedings{DBLP:conf/iclr/RaePJHL20,
author = {Jack W. Rae and
Anna Potapenko and
Siddhant M. Jayakumar and
Chloe Hillier and
Timothy P. Lillicrap},
title = {Compressive Transformers for Long-Range Sequence Modelling},
publisher = {OpenReview.net},
year = {2020}
}
@article{DBLP:journals/corr/abs-2004-05150,
author = {Iz Beltagy and
Matthew E. Peters and
Arman Cohan},
title = {Longformer: The Long-Document Transformer},
journal = {CoRR},
volume = {abs/2004.05150},
year = {2020}
}
@article{DBLP:journals/corr/abs-2005-00743,
author = {Yi Tay and
Dara Bahri and
Donald Metzler and
Da-Cheng Juan and
Zhe Zhao and
Che Zheng},
title = {Synthesizer: Rethinking Self-Attention in Transformer Models},
journal = {CoRR},
volume = {abs/2005.00743},
year = {2020}
}
@inproceedings{DBLP:conf/iclr/WuLLLH20,
author = {Zhanghao Wu and
Zhijian Liu and
Ji Lin and
Yujun Lin and
Song Han},
title = {Lite Transformer with Long-Short Range Attention},
publisher = {OpenReview.net},
year = {2020}
}
@inproceedings{DBLP:journals/corr/abs-1905-09418,
author = {Elena Voita and
David Talbot and
......@@ -5506,18 +5583,16 @@ pages ={157-166},
}
@inproceedings{dai-etal-2019-transformer,
title = "Transformer-{XL}: Attentive Language Models beyond a Fixed-Length Context",
author = "Dai, Zihang and
Yang, Zhilin and
Yang, Yiming and
Carbonell, Jaime and
Le, Quoc and
Salakhutdinov, Ruslan",
month = jul,
year = "2019",
address = "Florence, Italy",
publisher = "Association for Computational Linguistics",
pages = "2978--2988",
author = {Zihang Dai and
Zhilin Yang and
Yiming Yang and
Jaime G. Carbonell and
Quoc Viet Le and
Ruslan Salakhutdinov},
title = {Transformer-XL: Attentive Language Models beyond a Fixed-Length Context},
pages = {2978--2988},
publisher = {Association for Computational Linguistics},
year = {2019}
}
@article{Liu2020LearningTE,
title={Learning to Encode Position for Transformer with Continuous Dynamical Model},
......@@ -5563,10 +5638,15 @@ pages ={157-166},
year={2018}
}
@inproceedings{Dou2018ExploitingDR,
author = {Zi-Yi Dou and
Zhaopeng Tu and
Xing Wang and
Shuming Shi and
Tong Zhang},
title = {Exploiting Deep Representations for Neural Machine Translation},
pages = {4253--4262},
publisher = {Association for Computational Linguistics},
year = {2018}
}
@inproceedings{Wang2019ExploitingSC,
title={Exploiting Sentential Context for Neural Machine Translation},
......@@ -5576,10 +5656,16 @@ pages ={157-166},
}
@inproceedings{Dou2019DynamicLA,
author = {Zi-Yi Dou and
Zhaopeng Tu and
Xing Wang and
Longyue Wang and
Shuming Shi and
Tong Zhang},
title = {Dynamic Layer Aggregation for Neural Machine Translation with Routing-by-Agreement},
pages = {86--93},
publisher = {Association for the Advancement of Artificial Intelligence},
year = {2019}
}
@inproceedings{Wei2020MultiscaleCD,
title={Multiscale Collaborative Deep Models for Neural Machine Translation},
......@@ -5614,7 +5700,7 @@ pages ={157-166},
@article{li2020shallow,
title={Shallow-to-Deep Training for Neural Machine Translation},
author={Li, Bei and Wang, Ziyang and Liu, Hui and Jiang, Yufan and Du, Quan and Xiao, Tong and Wang, Huizhen and Zhu, Jingbo},
publisher={Conference on Empirical Methods in Natural Language Processing},
year={2020}
}
%%%%% chapter 12------------------------------------------------------
......