Commit a7a807cb by xiaotong

more figures

parent c06d6c83
......@@ -145,6 +145,11 @@
\item 整个句子编码到一个向量里可能会有信息丢失
\item 缺少源语词-目标语词之间的对应。某种意义上讲,一个目标语单词的生成无法区分不同源语单词的贡献
\end{itemize}
\item 但是,翻译是具有很强的局部性的,有些词之间会有更紧密的关系,这种关系可以在建模中考虑
\begin{itemize}
\item 源语词和目标语词的对应并不是均匀的,甚至非常稀疏
\item 比如,一些短语的生成仅依赖于源文中的少数词
\end{itemize}
\end{itemize}
\begin{center}
......@@ -173,19 +178,27 @@
\node [] (wt5) at (t5) {\scriptsize{long}};
\node [] (wt6) at ([xshift=1em]t6) {\scriptsize{sentence}};
\node [anchor=south west,fill=red!30,minimum width=1.6in,minimum height=1.5em] (encoder) at ([yshift=1em]ws1.north west) {\footnotesize{Encoder}};
\node [anchor=south west,fill=red!30,minimum width=1.6in,minimum height=1.5em] (encoder) at ([yshift=1.0em]ws1.north west) {\footnotesize{Encoder}};
\node [anchor=west,fill=blue!30,minimum width=1.9in,minimum height=1.5em] (decoder) at ([xshift=4.5em]encoder.east) {\footnotesize{Decoder}};
\node [anchor=west,fill=green!30,minimum height=1.5em] (representation) at ([xshift=1em]encoder.east) {\footnotesize{表示}};
\draw [->,thick] ([xshift=1pt]encoder.east)--([xshift=-1pt]representation.west);
\draw [->,thick] ([xshift=1pt]representation.east)--([xshift=-1pt]decoder.west);
\foreach \x in {1,2,...,6}
\draw[->] (s\x.north) -- ([yshift=0.5em]s\x.north);
\draw[->] ([yshift=0.1em]s\x.north) -- ([yshift=1.2em]s\x.north);
\foreach \x in {1,2,...,5}
\draw[<-] ([yshift=0.1em]t\x.north) -- ([yshift=1.2em]t\x.north);
\draw[<-] ([yshift=0.1em,xshift=1em]t6.north) -- ([yshift=1.2em,xshift=1em]t6.north);
\end{scope}
\end{tikzpicture}
\end{center}
\vspace{-1.5em}
\end{frame}
......
......@@ -882,7 +882,14 @@ NLP问题的隐含结构假设 & 无隐含结构假设,端到端学习 \\
%%%------------------------------------------------------------------------------------------------------------
%%% 一些变种
\begin{frame}{改进 - 双向模型}
%%% 图
\begin{itemize}
\item 自左向右的模型只考虑了左侧的上下文,因此可以用自右向左的模型对右侧上下文建模
\begin{itemize}
\item 最终将两个模型的表示融合,同时送给解码端
\end{itemize}
\end{itemize}
\vspace{-0.5em}
\begin{center}
\begin{tikzpicture}
\setlength{\base}{0.9cm}
......@@ -918,7 +925,7 @@ NLP问题的隐含结构假设 & 无隐含结构假设,端到端学习 \\
% RNN Decoder
\foreach \x in {1,2,...,10}
\node[rnnnode,minimum height=0.5\base,fill=green!30!white,anchor=south] (demb\x) at ([yshift=2\base]enc\x.north) {};
\node[rnnnode,minimum height=0.5\base,fill=green!30!white,anchor=south] (demb\x) at ([yshift=1.5\base]enc\x.north) {};
\foreach \x in {1,2,...,10}
\node[rnnnode,fill=blue!30!white,anchor=south] (dec\x) at ([yshift=0.5\base]demb\x.north) {};
\foreach \x in {1,2,...,10}
......@@ -1007,7 +1014,7 @@ NLP问题的隐含结构假设 & 无隐含结构假设,端到端学习 \\
\draw[-latex'] (backenc\x.west) to (backenc\y.east);
}
\coordinate (bridge) at ([yshift=-1.2\base]demb2);
\draw[-latex'] (enc10.north) .. controls +(north:\base) and +(east:1.5\base) .. (bridge) .. controls +(west:2.5\base) and +(west:0.6\base) .. (dec1.west);
\draw[-latex'] (enc10.north) .. controls +(north:0.7\base) and +(east:1.5\base) .. (bridge) .. controls +(west:2.5\base) and +(west:0.6\base) .. (dec1.west);
\draw[-latex'] (backenc1) to [out=180,in=180] (dec1.west);
% Backward RNN
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论