Commit e41da37b by xiaotong

new update

parent 77015eeb
@@ -114,44 +114,95 @@
 %%%------------------------------------------------------------------------------------------------------------
 \section{Encoder-Decoder Framework}
 %%%------------------------------------------------------------------------------------------------------------
-%%% Translation comparison
-\begin{frame}{Machine Translation Today}
-\begin{tcolorbox}[enhanced,frame engine=empty,boxrule=0.1mm,size=title,colback=red!10!white]
-\begin{flushleft}
-\begin{spacing}{0.9}
-\scriptsize{\textbf{Source (English)}: During Soviet times, if a city's population topped one million, it would become eligible for its own metro. Planners wanted to brighten the lives of everyday Soviet citizens, and saw the metros, with their tens of thousands of daily passengers, as a singular opportunity to do so. In 1977, Tashkent, the capital of Uzbekistan, became the seventh Soviet city to have a metro built. Grand themes celebrating the history of Uzbekistan and the Soviet Union were brought to life, as art was commissioned and designers set to work. The stations reflected different themes, some with domed ceilings and painted tiles reminiscent of Uzbekistan's Silk Road mosques, while others ... }
-\end{spacing}
-\end{flushleft}
-\end{tcolorbox}
-\begin{minipage}[t]{0.47\textwidth}
-\begin{beamerboxesrounded}[upper=uppercolblue,lower=lowercolblue,shadow=true]{\scriptsize{\textbf{Translation\visible<2->{ (statistical MT)}}}}
-{\scriptsize
-\begin{spacing}{0.9}
-在苏联时代,如果一个城市的人口突破一百万,这将成为合资格为自己的地铁。规划者想去照亮每天的苏联公民的生命,看到地铁,与他们的数十每天数千乘客,作为一个独特的机会来这样做。1977年,塔什干,乌兹别克斯坦的首都,成了苏联第七城市建有地铁。宏大主题,庆祝乌兹别克斯坦和苏联的历史被带到生活,因为艺术是委托和设计师开始工作。车站反映了不同的主题,有的圆顶天花板和绘瓷砖让人想起乌兹别克斯坦是丝绸之路的清真寺,而另一些则装饰着...
-\end{spacing}
-}
-\end{beamerboxesrounded}
-\end{minipage}
-\hfill
-\begin{minipage}[t]{0.47\textwidth}
-\begin{beamerboxesrounded}[upper=uppercolblue,lower=lowercolblue,shadow=true]{\scriptsize{\textbf{Translation\visible<2->{ (neural MT -- very fluent!)}}}}
-{\scriptsize
-\begin{spacing}{0.9}
-在苏联时期,如果一个城市的人口超过一百万,它就有资格拥有自己的地铁。 规划者想要照亮日常苏联公民的生活,并把拥有数万名每日乘客的地铁看作是这样做的一个绝佳机会。 1977年,乌兹别克斯坦首都塔什干成为苏联第七个修建地铁的城市。 随着艺术的委托和设计师们的工作,乌兹别克斯坦和苏联历史的宏伟主题被赋予了生命力。 这些电台反映了不同的主题,有的有穹顶和彩砖,让人想起乌兹别克斯坦的丝绸之路清真寺,有的则用...
-\end{spacing}
-}
-\end{beamerboxesrounded}
-\end{minipage}
-\end{frame}
-%%%------------------------------------------------------------------------------------------------------------
-\section{RNN Translation Models and the Attention Mechanism}
+\section{RNN Translation Models and the Attention Mechanism}
+
+%%%------------------------------------------------------------------------------------------------------------
+\subsection{Origins}
+
+%%%------------------------------------------------------------------------------------------------------------
+%%% History of neural machine translation
+\begin{frame}{The Earliest Neural Machine Translation}
+\begin{itemize}
+\item Neural networks are nothing new in machine translation; they had long been used inside individual components, e.g., for scoring translation candidates and for language modeling
+\begin{itemize}
+\item The overall framework, however, was still statistical machine translation
+\end{itemize}
+\item<2-> End-to-end neural modeling emerged in 2013-2015 and became known as \alert{Neural Machine Translation (NMT)}; some representative works:
+\end{itemize}
+\visible<2->{
+\begin{center}
+{\footnotesize
+\begin{tabular}{l | l | l}
+\textbf{Year} & \textbf{Authors} & \textbf{Paper} \\ \hline
+2013 & Kalchbrenner and & Recurrent Continuous Translation Models \\
+ & Blunsom & \\
+2014 & Sutskever et al. & Sequence to Sequence Learning with \\
+ & & Neural Networks \\
+2014 & Cho et al. & Learning Phrase Representations using \\
+ & & RNN Encoder-Decoder for Statistical \\
+ & & Machine Translation \\
+2014 & Cho et al. & On the Properties of Neural Machine \\
+ & & Translation \\
+2015 & Jean et al. & On Using Very Large Target Vocabulary \\
+ & & for Neural Machine Translation
+\end{tabular}
+}
+\end{center}
+}
+\end{frame}
+%%%------------------------------------------------------------------------------------------------------------
+%%% The gradual rise of NMT
+\begin{frame}{The Rise}
+\begin{itemize}
+\item Before 2015, statistical machine translation (SMT) was the dominant approach in NLP
+\begin{itemize}
+\item NMT systems were still rudimentary at the time and were soundly beaten by SMT
+\item Most of the field had not yet come around to the NMT era; even the early reports by Kalchbrenner et al. were met with skepticism
+\end{itemize}
+\end{itemize}
+\end{frame}
+%%%------------------------------------------------------------------------------------------------------------
+%%% Model structure
+\begin{frame}{An RNN-based Translation Model}
+\begin{itemize}
+\item A simple model
+\end{itemize}
+%%% Figure
+\end{frame}
+%%%------------------------------------------------------------------------------------------------------------
+%%% LSTM
+\begin{frame}{Long Short-Term Memory (LSTM) (2 pages?)}
+\begin{itemize}
+\item LSTM
+\end{itemize}
+%%% Figure
+\end{frame}
+%%%------------------------------------------------------------------------------------------------------------
+%%% GRU
+\begin{frame}{Gated Recurrent Units (GRU)}
+\begin{itemize}
+\item GRU
+\end{itemize}
+%%% Figure
+\end{frame}
+%%%------------------------------------------------------------------------------------------------------------
+%%% Some variants
+\begin{frame}{Further Improvements}
+\begin{itemize}
+\item Multi-layer networks
+\item Fine-tuning
+\end{itemize}
+%%% Figure
+\end{frame}
 %%%------------------------------------------------------------------------------------------------------------
 \subsection{Model Structure}
......
@@ -434,6 +434,7 @@ Latent structure assumptions for NLP problems & no latent structure assumptions, end-to-end learning \\
 \begin{itemize}
 \item<3-> Will machine translation gradually replace humans? \visible<4->{\alert{- No; a father cannot be replaced by his son}}
+\item<3-> Do we still need human linguistic knowledge? \visible<4->{\alert{- Yes, but it calls for new thinking}}
 \item<3-> Even neural network architectures can now be learned automatically through architecture search; does that mean humans have nothing left to do? \visible<4->{\alert{- And who designed the architecture search? At the very least someone still has to type on the keyboard, haha}}
 \end{itemize}
 }
@@ -448,8 +449,9 @@ Latent structure assumptions for NLP problems & no latent structure assumptions, end-to-end learning \\
 \begin{tcolorbox}[enhanced,size=normal,left=2mm,right=1mm,colback=red!5!white,colframe=red!75!black,drop fuzzy shadow]
 {\large
 \textbf{Primer: RNN Translation Models and the Attention Mechanism} \\
-\small{1. Model structure} \\
-\small{2. Attention mechanism}
+\small{1. Origins} \\
+\small{2. Model structure} \\
+\small{3. Attention mechanism}
 }
 \end{tcolorbox}
@@ -499,10 +501,10 @@ Latent structure assumptions for NLP problems & no latent structure assumptions, end-to-end learning \\
 \node [anchor=west,rnnnode] (node13) at ([xshift=1em]node12.east) {\tiny{RNN Cell}};
 \node [anchor=west,rnnnode] (node14) at ([xshift=1em]node13.east) {\tiny{RNN Cell}};
-\node [anchor=north,rnnnode,fill=red!30!white] (e1) at ([yshift=-1em]node11.south) {\tiny{}};
-\node [anchor=north,rnnnode,fill=red!30!white] (e2) at ([yshift=-1em]node12.south) {\tiny{}};
-\node [anchor=north,rnnnode,fill=red!30!white] (e3) at ([yshift=-1em]node13.south) {\tiny{}};
-\node [anchor=north,rnnnode,fill=red!30!white] (e4) at ([yshift=-1em]node14.south) {\tiny{}};
+\node [anchor=north,rnnnode,fill=blue!30!white] (e1) at ([yshift=-1em]node11.south) {\tiny{}};
+\node [anchor=north,rnnnode,fill=blue!30!white] (e2) at ([yshift=-1em]node12.south) {\tiny{}};
+\node [anchor=north,rnnnode,fill=blue!30!white] (e3) at ([yshift=-1em]node13.south) {\tiny{}};
+\node [anchor=north,rnnnode,fill=blue!30!white] (e4) at ([yshift=-1em]node14.south) {\tiny{}};
 \node [anchor=north,inner sep=2pt] (w1) at ([yshift=-1em]e1.south) {\tiny{$<$s$>$}};
 \node [anchor=north,inner sep=2pt] (w2) at ([yshift=-1em]e2.south) {\tiny{}};
 \node [anchor=north,inner sep=2pt] (w3) at ([yshift=-1em]e3.south) {\tiny{我们}};
@@ -518,10 +520,10 @@ Latent structure assumptions for NLP problems & no latent structure assumptions, end-to-end learning \\
 \draw [->,thick] ([yshift=0.1em]e3.north)--([yshift=-0.1em]node13.south);
 \draw [->,thick] ([yshift=0.1em]e4.north)--([yshift=-0.1em]node14.south);
-\node [anchor=south,rnnnode,fill=red!30!white] (node21) at ([yshift=1.0em]node11.north) {\tiny{}};
-\node [anchor=south,rnnnode,fill=red!30!white] (node22) at ([yshift=1.0em]node12.north) {\tiny{}};
-\node [anchor=south,rnnnode,fill=red!30!white] (node23) at ([yshift=1.0em]node13.north) {\tiny{}};
-\node [anchor=south,rnnnode,fill=red!30!white] (node24) at ([yshift=1.0em]node14.north) {\tiny{}};
+\node [anchor=south,rnnnode,fill=blue!30!white] (node21) at ([yshift=1.0em]node11.north) {\tiny{}};
+\node [anchor=south,rnnnode,fill=blue!30!white] (node22) at ([yshift=1.0em]node12.north) {\tiny{}};
+\node [anchor=south,rnnnode,fill=blue!30!white] (node23) at ([yshift=1.0em]node13.north) {\tiny{}};
+\node [anchor=south,rnnnode,fill=blue!30!white] (node24) at ([yshift=1.0em]node14.north) {\tiny{}};
 \node [anchor=south] (output1) at ([yshift=1em]node21.north) {\Large{\textbf{}}};
 \node [anchor=south] (output2) at ([yshift=1em]node22.north) {\Large{\textbf{}}};
@@ -550,6 +552,100 @@ Latent structure assumptions for NLP problems & no latent structure assumptions, end-to-end learning \\
 \end{frame}
+%%%------------------------------------------------------------------------------------------------------------
+\subsection{Origins}
+%%%------------------------------------------------------------------------------------------------------------
+%%% History of neural machine translation
+\begin{frame}{The Earliest Neural Machine Translation}
+\begin{itemize}
+\item Neural networks are nothing new in machine translation; they had long been used inside individual components, e.g., for scoring translation candidates and for language modeling
+\begin{itemize}
+\item The overall framework, however, was still statistical machine translation
+\end{itemize}
+\item<2-> End-to-end neural modeling emerged in 2013-2015 and became known as \alert{Neural Machine Translation (NMT)}; some representative works:
+\end{itemize}
+\visible<2->{
+\begin{center}
+{\footnotesize
+\begin{tabular}{l | l | l}
+\textbf{Year} & \textbf{Authors} & \textbf{Paper} \\ \hline
+2013 & Kalchbrenner and & Recurrent Continuous Translation Models \\
+ & Blunsom & \\
+2014 & Sutskever et al. & Sequence to Sequence Learning with \\
+ & & Neural Networks \\
+2014 & Cho et al. & Learning Phrase Representations using \\
+ & & RNN Encoder-Decoder for Statistical \\
+ & & Machine Translation \\
+2014 & Cho et al. & On the Properties of Neural Machine \\
+ & & Translation \\
+2015 & Jean et al. & On Using Very Large Target Vocabulary \\
+ & & for Neural Machine Translation
+\end{tabular}
+}
+\end{center}
+}
+\end{frame}
+%%%------------------------------------------------------------------------------------------------------------
+%%% The gradual rise of NMT
+\begin{frame}{The Rise}
+\begin{itemize}
+\item Before 2015, statistical machine translation (SMT) was the dominant approach in NLP
+\begin{itemize}
+\item NMT systems were still rudimentary at the time and were soundly beaten by SMT
+\item Most of the field had not yet come around to the NMT era; even the early reports by Kalchbrenner et al. were met with skepticism
+\end{itemize}
+\end{itemize}
+\end{frame}
+%%%------------------------------------------------------------------------------------------------------------
+%%% Model structure
+\begin{frame}{An RNN-based Translation Model}
+\begin{itemize}
+\item A simple model, sketched below
+\end{itemize}
+%%% Figure
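+%%% A minimal sketch of the recursion such a figure would depict, assuming the standard
+%%% 2014-era notation: source words $\mathbf{x}_j$, encoder states $\mathbf{h}_j$, decoder
+%%% states $\mathbf{s}_i$, outputs $y_i$; $f$ and $g$ are unspecified nonlinear functions.
+\begin{align*}
+\textrm{Encoder: } & \mathbf{h}_j = f(\mathbf{h}_{j-1}, \mathbf{x}_j), \qquad \mathbf{c} = \mathbf{h}_m \\
+\textrm{Decoder: } & \mathbf{s}_i = f(\mathbf{s}_{i-1}, \mathbf{y}_{i-1}, \mathbf{c}), \qquad \mathrm{P}(y_i \mid y_{<i}, \mathbf{x}) = \mathrm{softmax}(g(\mathbf{s}_i))
+\end{align*}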
+\end{frame}
+%%%------------------------------------------------------------------------------------------------------------
+%%% LSTM
+\begin{frame}{Long Short-Term Memory (LSTM) (2 pages?)}
+\begin{itemize}
+\item The LSTM cell, sketched below
+\end{itemize}
+%%% Figure
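+%%% A minimal sketch of the standard LSTM cell (Hochreiter and Schmidhuber, 1997),
+%%% assuming input $\mathbf{x}_t$, hidden state $\mathbf{h}_t$, cell state $\mathbf{c}_t$;
+%%% $\sigma$ is the logistic sigmoid, $\odot$ element-wise multiplication, $[\cdot;\cdot]$ concatenation.
+\begin{align*}
+\mathbf{i}_t &= \sigma(\mathbf{W}_i[\mathbf{h}_{t-1};\mathbf{x}_t] + \mathbf{b}_i) && \textrm{input gate} \\
+\mathbf{f}_t &= \sigma(\mathbf{W}_f[\mathbf{h}_{t-1};\mathbf{x}_t] + \mathbf{b}_f) && \textrm{forget gate} \\
+\mathbf{o}_t &= \sigma(\mathbf{W}_o[\mathbf{h}_{t-1};\mathbf{x}_t] + \mathbf{b}_o) && \textrm{output gate} \\
+\mathbf{c}_t &= \mathbf{f}_t \odot \mathbf{c}_{t-1} + \mathbf{i}_t \odot \tanh(\mathbf{W}_c[\mathbf{h}_{t-1};\mathbf{x}_t] + \mathbf{b}_c) \\
+\mathbf{h}_t &= \mathbf{o}_t \odot \tanh(\mathbf{c}_t)
+\end{align*}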
+\end{frame}
+%%%------------------------------------------------------------------------------------------------------------
+%%% GRU
+\begin{frame}{Gated Recurrent Units (GRU)}
+\begin{itemize}
+\item The GRU cell, sketched below
+\end{itemize}
+%%% Figure
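+%%% A minimal sketch of the GRU (Cho et al., 2014): two gates and no separate cell state;
+%%% notation as on the LSTM slide.
+\begin{align*}
+\mathbf{z}_t &= \sigma(\mathbf{W}_z[\mathbf{h}_{t-1};\mathbf{x}_t]) && \textrm{update gate} \\
+\mathbf{r}_t &= \sigma(\mathbf{W}_r[\mathbf{h}_{t-1};\mathbf{x}_t]) && \textrm{reset gate} \\
+\tilde{\mathbf{h}}_t &= \tanh(\mathbf{W}[\mathbf{r}_t \odot \mathbf{h}_{t-1};\mathbf{x}_t]) \\
+\mathbf{h}_t &= (1-\mathbf{z}_t) \odot \mathbf{h}_{t-1} + \mathbf{z}_t \odot \tilde{\mathbf{h}}_t
+\end{align*}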
+\end{frame}
+%%%------------------------------------------------------------------------------------------------------------
+%%% Some variants
+\begin{frame}{Further Improvements - Bidirectional Models}
+%%% Figure
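+%%% A minimal sketch, assuming the usual construction: one RNN reads the source
+%%% left-to-right, another right-to-left, and each position concatenates the two states,
+%%% so $\mathbf{h}_j$ summarizes the whole sentence around word $j$.
+\begin{displaymath}
+\overrightarrow{\mathbf{h}}_j = f(\overrightarrow{\mathbf{h}}_{j-1}, \mathbf{x}_j), \qquad
+\overleftarrow{\mathbf{h}}_j = f(\overleftarrow{\mathbf{h}}_{j+1}, \mathbf{x}_j), \qquad
+\mathbf{h}_j = [\overrightarrow{\mathbf{h}}_j; \overleftarrow{\mathbf{h}}_j]
+\end{displaymath}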
+\end{frame}
+%%%------------------------------------------------------------------------------------------------------------
+%%% Some variants
+\begin{frame}{Further Improvements - Multi-layer Networks}
+%%% Figure
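+%%% A minimal sketch, assuming a standard stacked RNN: layer $l$ consumes the state
+%%% sequence of layer $l-1$, and the top layer feeds the output (or the decoder).
+\begin{displaymath}
+\mathbf{h}_t^{(l)} = f\big(\mathbf{h}_{t-1}^{(l)}, \mathbf{h}_t^{(l-1)}\big), \qquad \mathbf{h}_t^{(0)} = \mathbf{x}_t
+\end{displaymath}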
+\end{frame}
+%%%------------------------------------------------------------------------------------------------------------
+%%% Some variants
+\begin{frame}{Further Improvements - Fine-tuning}
+%%% Figure
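+%%% Assumption (not specified on the slide): ``fine-tuning'' is read here as continued
+%%% training of a converged general-domain model on in-domain data with a small
+%%% learning rate; a tentative sketch of that reading:
+\begin{displaymath}
+\hat{\theta}_{\textrm{tuned}} = \mathop{\mathrm{argmin}}_{\theta}\, \mathcal{L}_{\textrm{in-domain}}(\theta), \qquad \theta \textrm{ initialized at } \hat{\theta}_{\textrm{general}}
+\end{displaymath}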
+\end{frame}
 %%%------------------------------------------------------------------------------------------------------------
 \subsection{Model Structure}
......