Commit af91ac3c by 单韦乔

15.4图片

parent 33986658
@@ -3,22 +3,22 @@
%left
\begin{scope}
-\foreach \x/\d in {1/2em, 2/8em, 3/18em, 4/24em}
+\foreach \x/\d in {1/2em, 2/8em}
\node[unit,fill=yellow!20] at (0,\d) (ln_\x) {层正则};
-\foreach \x/\d in {1/4em, 2/20em}
+\foreach \x/\d in {1/4em}
\node[unit,fill=green!20] at (0,\d) (sa_\x) {8头自注意力:512};
-\foreach \x/\d in {1/6em, 2/16em, 3/22em, 4/32em}
+\foreach \x/\d in {1/6em, 2/16em}
\node[draw,circle,minimum size=1em,inner sep=1pt] at (0,\d) (add_\x) {\scriptsize\bfnew{+}};
-\foreach \x/\d in {2/14em, 4/30em}
+\foreach \x/\d in {2/14em}
\node[unit,fill=red!20] at (0,\d) (conv_\x) {卷积$1 \times 1$:512};
-\foreach \x/\d in {1/10em,3/26em}
+\foreach \x/\d in {1/10em}
\node[unit,fill=red!20] at (0,\d) (conv_\x) {卷积$1 \times 1$:2048};
-\foreach \x/\d in {1/12em, 2/28em}
+\foreach \x/\d in {1/12em}
\node[unit,fill=blue!20] at (0,\d) (relu_\x) {RELU};
\draw[->,thick] ([yshift=-1.4em]ln_1.-90) -- ([yshift=-0.1em]ln_1.-90);
@@ -29,80 +29,57 @@
\draw[->,thick] ([yshift=0.1em]conv_1.90) -- ([yshift=-0.1em]relu_1.-90);
\draw[->,thick] ([yshift=0.1em]relu_1.90) -- ([yshift=-0.1em]conv_2.-90);
\draw[->,thick] ([yshift=0.1em]conv_2.90) -- ([yshift=-0.1em]add_2.-90);
-\draw[->,thick] ([yshift=0.1em]add_2.90) -- ([yshift=-0.1em]ln_3.-90);
+\draw[->,thick] ([yshift=0.1em]add_2.90) -- ([yshift=1em]add_2.90);
\draw[->,thick] ([yshift=0.1em]ln_3.90) -- ([yshift=-0.1em]sa_2.-90);
\draw[->,thick] ([yshift=0.1em]sa_2.90) -- ([yshift=-0.1em]add_3.-90);
\draw[->,thick] ([yshift=0.1em]add_3.90) -- ([yshift=-0.1em]ln_4.-90);
\draw[->,thick] ([yshift=0.1em]ln_4.90) -- ([yshift=-0.1em]conv_3.-90);
\draw[->,thick] ([yshift=0.1em]conv_3.90) -- ([yshift=-0.1em]relu_2.-90);
\draw[->,thick] ([yshift=0.1em]relu_2.90) -- ([yshift=-0.1em]conv_4.-90);
\draw[->,thick] ([yshift=0.1em]conv_4.90) -- ([yshift=-0.1em]add_4.-90);
\draw[->,thick] ([yshift=0.1em]add_4.90) -- ([yshift=1em]add_4.90);
\draw[->,thick] ([yshift=-0.8em]ln_1.-90) .. controls ([xshift=5em,yshift=-0.8em]ln_1.-90) and ([xshift=5em]add_1.0) .. (add_1.0);
\draw[->,thick] (add_1.0) .. controls ([xshift=5em]add_1.0) and ([xshift=5em]add_2.0) .. (add_2.0);
\draw[->,thick] (add_2.0) .. controls ([xshift=5em]add_2.0) and ([xshift=5em]add_3.0) .. (add_3.0);
\draw[->,thick] (add_3.0) .. controls ([xshift=5em]add_3.0) and ([xshift=5em]add_4.0) .. (add_4.0);
\node[font=\scriptsize] at (0em, -1em){(a) Transformer编码器中若干块的结构};
\end{scope}
%right
\begin{scope}[xshift=14em]
-\foreach \x/\d in {1/2em, 2/8em, 3/16em, 4/22em, 5/28em}
+\foreach \x/\d in {1/2em, 2/8em, 3/14em}
\node[unit,fill=yellow!20] at (0,\d) (ln_\x) {层正则};
\node[unit,fill=green!20] at (0,24em) (sa_1) {8头自注意力:512};
-\foreach \x/\d in {1/6em, 2/14em, 3/20em, 4/26em, 5/36em}
+\foreach \x/\d in {1/6em, 2/12em, 3/22em}
\node[draw,circle,minimum size=1em,inner sep=1pt] at (0,\d) (add_\x) {\scriptsize\bfnew{+}};
-\node[unit,fill=red!20] at (0,30em) (conv_4) {卷积$1 \times 1$:2048};
+\node[unit,fill=red!20] at (0,16em) (conv_4) {卷积$1 \times 1$:2048};
-\node[unit,fill=red!20] at (0,34em) (conv_5) {卷积$1 \times 1$:512};
+\node[unit,fill=red!20] at (0,20em) (conv_5) {卷积$1 \times 1$:512};
\node[unit,fill=blue!20] at (0,18em) (relu_3) {RELU};
\node[unit,fill=cyan!20] at (0,4em) (conv_3) {Sep卷积$9 \times 1$:256};
\node[unit,fill=green!20] at (0,10em) (sa_1) {8头自注意力:512};
\node[unit,fill=blue!20] at (0,32em) (relu_3) {RELU};
\node[unit,fill=red!20] at (0,4em) (glu_1) {门控线性单元:512};
\node[unit,fill=red!20] at (-3em,10em) (conv_1) {卷积$1 \times 1$:2048};
\node[unit,fill=cyan!20] at (3em,10em) (conv_2) {卷积$3 \times 1$:256};
\node[unit,fill=blue!20] at (-3em,12em) (relu_1) {RELU};
\node[unit,fill=blue!20] at (3em,12em) (relu_2) {RELU};
\node[unit,fill=cyan!20] at (0em,18em) (conv_3) {Sep卷积$9 \times 1$:256};
\draw[->,thick] ([yshift=-1.4em]ln_1.-90) -- ([yshift=-0.1em]ln_1.-90);
-\draw[->,thick] ([yshift=0.1em]ln_1.90) -- ([yshift=-0.1em]glu_1.-90);
+\draw[->,thick] ([yshift=0.1em]ln_1.90) -- ([yshift=-0.1em]conv_3.-90);
-\draw[->,thick] ([yshift=0.1em]glu_1.90) -- ([yshift=-0.1em]add_1.-90);
+\draw[->,thick] ([yshift=0.1em]conv_3.90) -- ([yshift=-0.1em]add_1.-90);
\draw[->,thick] ([yshift=0.1em]add_1.90) -- ([yshift=-0.1em]ln_2.-90);
-\draw[->,thick] ([,yshift=0.1em]ln_2.135) -- ([yshift=-0.1em]conv_1.-90);
+\draw[->,thick] ([,yshift=0.1em]ln_2.90) -- ([yshift=-0.1em]sa_1.-90);
-\draw[->,thick] ([yshift=0.1em]ln_2.45) -- ([yshift=-0.1em]conv_2.-90);
+\draw[->,thick] ([yshift=0.1em]sa_1.90) -- ([yshift=-0.1em]add_2.-90);
\draw[->,thick] ([yshift=0.1em]conv_1.90) -- ([yshift=-0.1em]relu_1.-90);
\draw[->,thick] ([yshift=0.1em]conv_2.90) -- ([yshift=-0.1em]relu_2.-90);
\draw[->,thick] ([yshift=0.1em]relu_1.90) -- ([yshift=-0.1em]add_2.-135);
\draw[->,thick] ([yshift=0.1em]relu_2.90) -- ([yshift=-0.1em]add_2.-45);
\draw[->,thick] ([yshift=0.1em]add_2.90) -- ([yshift=-0.1em]ln_3.-90);
-\draw[->,thick] ([yshift=0.1em]ln_3.90) -- ([yshift=-0.1em]conv_3.-90);
+\draw[->,thick] ([yshift=0.1em]ln_3.90) -- ([yshift=-0.1em]conv_4.-90);
\draw[->,thick] ([yshift=0.1em]conv_3.90) -- ([yshift=-0.1em]add_3.-90);
\draw[->,thick] ([yshift=0.1em]add_3.90) -- ([yshift=-0.1em]ln_4.-90);
\draw[->,thick] ([yshift=0.1em]ln_4.90) -- ([yshift=-0.1em]sa_1.-90);
\draw[->,thick] ([yshift=0.1em]sa_1.90) -- ([yshift=-0.1em]add_4.-90);
\draw[->,thick] ([yshift=0.1em]add_4.90) -- ([yshift=-0.1em]ln_5.-90);
\draw[->,thick] ([yshift=0.1em]ln_5.90) -- ([yshift=-0.1em]conv_4.-90);
\draw[->,thick] ([yshift=0.1em]conv_4.90) -- ([yshift=-0.1em]relu_3.-90);
\draw[->,thick] ([yshift=0.1em]relu_3.90) -- ([yshift=-0.1em]conv_5.-90);
-\draw[->,thick] ([yshift=0.1em]conv_5.90) -- ([yshift=-0.1em]add_5.-90);
+\draw[->,thick] ([yshift=0.1em]conv_5.90) -- ([yshift=-0.1em]add_3.-90);
-\draw[->,thick] ([yshift=0.1em]add_5.90) -- ([yshift=1em]add_5.90);
+\draw[->,thick] ([yshift=0.1em]add_3.90) -- ([yshift=1em]add_3.90);
\draw[->,thick] ([yshift=-0.8em]ln_1.-90) .. controls ([xshift=5em,yshift=-0.8em]ln_1.-90) and ([xshift=5em]add_1.0) .. (add_1.0);
-\draw[->,thick] (add_1.0) .. controls ([xshift=8em]add_1.0) and ([xshift=8em]add_3.0) .. (add_3.0);
+\draw[->,thick] (add_1.0) .. controls ([xshift=5em]add_1.0) and ([xshift=5em]add_2.0) .. (add_2.0);
-\draw[->,thick] (add_3.0) .. controls ([xshift=5em]add_3.0) and ([xshift=5em]add_4.0) .. (add_4.0);
+\draw[->,thick] (add_2.0) .. controls ([xshift=5em]add_2.0) and ([xshift=5em]add_3.0) .. (add_3.0);
\draw[->,thick] (add_4.0) .. controls ([xshift=5em]add_4.0) and ([xshift=5em]add_5.0) .. (add_5.0);
\node[font=\scriptsize,align=center] at (0em, -1.5em){(b) 使用结构搜索方法优化后的 \\ Transformer编码器中若干块的结构};
-\node[minimum size=0.8em,inner sep=0pt,rounded corners=1pt,draw,fill=blue!20] (act) at (5.5em, 38em){};
+\node[minimum size=0.8em,inner sep=0pt,rounded corners=1pt,draw,fill=blue!20] (act) at (5.5em, 24em){};
\node[anchor=west,font=\footnotesize] at ([xshift=0.1em]act.east){激活函数};
\node[anchor=north,minimum size=0.8em,inner sep=0pt,rounded corners=1pt,draw,fill=yellow!20] (nor) at ([yshift=-0.6em]act.south){};
\node[anchor=west,font=\footnotesize] at ([xshift=0.1em]nor.east){正则化};
\node[anchor=north,minimum size=0.8em,inner sep=0pt,rounded corners=1pt,draw,fill=cyan!20] (wc) at ([yshift=-0.6em]nor.south){};
\node[anchor=west,font=\footnotesize] at ([xshift=0.1em]wc.east){宽卷积};
\node[anchor=north,minimum size=0.8em,inner sep=0pt,rounded corners=1pt,draw,fill=green!20] (at) at ([yshift=-0.6em]wc.south){};
...
\begin{tikzpicture}
\tikzstyle{node}=[draw,minimum height=1.4em,minimum width=2em,rounded corners=1pt,thick]
\begin{scope}[scale=0.36]
\tikzstyle{every node}=[scale=0.36]
\node[draw=ublue,very thick,drop shadow,fill=white,minimum width=40em,minimum height=25em] (rec3) at (2.25,0){};
\node[draw=ublue,very thick,drop shadow,fill=white,minimum width=22em,minimum height=25em] (rec2) at (-12.4,0){};
\node[draw=ublue,very thick,drop shadow,fill=white,minimum width=24em,minimum height=25em] (rec1) at (-24,0){};
%left
\node[text=ublue] (label1) at (-26.4,4){\Huge\bfnew{结构空间}};
\node[align=left] at (-24,-0.5){\Huge\bfnew{1.前馈神经网络} \\ [4ex] \Huge\bfnew{2.卷积神经网络} \\ [4ex] \Huge\bfnew{3.循环神经网络} \\ [4ex] \Huge\bfnew{4. Transformer网络} \\ [4ex] \Huge\bfnew{...}};
\draw[ublue,very thick,-latex] (rec1.0) -- node[align=center,above,text=violet]{\huge{设计} \\ \huge{搜索} \\ \huge{空间}}(rec2.180);
%mid
\node[text=ublue] (label2) at (-14.4,4){\Huge\bfnew{搜索空间}};
\node[align=left] at (-12.4,-0.5){\Huge\bfnew{循环神经网络} \\ [4ex] \Huge\bfnew{1.普通RNN网络} \\ [4ex] \Huge\bfnew{2. LSTM网络} \\ [4ex] \Huge\bfnew{3. GRU网络} \\ [4ex] \Huge\bfnew{...}};
\draw[ublue,very thick,-latex] (rec2.0) -- node[align=center,above,text=violet]{\huge{选择} \\ \huge{搜索} \\ \huge{策略}}(rec3.180);
\draw[ublue,very thick,-latex,out=-150,in=-30] (rec3.-90) to node[above,text=violet,yshift=1em]{\huge{迭代结构搜索的过程}}(rec2.-90);
\draw[ublue,very thick,-latex,out=60,in=130] ([xshift=-8em]rec3.90) to node[above,text=violet]{\huge{性能评估}}([xshift=8em]rec3.90);
%right
\node[node] (n1) at (0,0){};
\node[node] (n2) at (1.5,0){};
\node[node] (n3) at (3,0){};
\node[node] (n4) at (4.5,0){};
\node[node] (n5) at (1.5,-1.3){};
\node[node] (n6) at (3,-1.3){};
\node[node] (n7) at (2.25,-2.4){};
\node[node] (n8) at (3,1.3){};
\draw[->,thick] (n1.0) -- (n2.180);
\draw[->,thick] (n2.0) -- (n3.180);
\draw[->,thick] (n3.0) -- (n4.180);
\draw[->,thick,out=60,in=180] (n1.90) to (n8.180);
\draw[->,thick,out=-10,in=90] (n8.0) to (n4.90);
\draw[->,thick,out=90,in=-90] (n5.90) to (n3.-90);
\draw[->,thick,out=90,in=-90] (n6.90) to (n4.-90);
\draw[->,thick,out=90,in=-90] (n7.90) to (n5.-90);
\draw[->,thick,out=90,in=-90] (n7.90) to (n6.-90);
\node[font=\huge] (ht) at (-0.2,2.2){$\mathbi h_t$};
\node[draw,font=\huge,inner sep=0pt,minimum width=4em,minimum height=4em,very thick,rounded corners=2pt] (ht-1) at (-3,0) {$\mathbi h_{t-1}$};
\node[draw,font=\huge,inner sep=0pt,minimum width=4em,minimum height=4em,very thick,rounded corners=2pt] (ht+1) at (7.5,0) {$\mathbi h_{t+1}$};
\node[font=\huge] (xt) at (2.25,-4.2){$x_t$};
\node[font=\Huge] at (9,0){$\cdots$};
\node[font=\Huge] at (-4.5,0){$\cdots$};
\node[text=ublue] (label3) at (-2,4){\Huge\bfnew{找到的模型结构}};
\node[draw,rounded corners=6pt,very thick,minimum width=16em,minimum height=15em] (box1) at (2.25,0){};
\draw[->,very thick] (ht-1.0) -- (box1.180);
\draw[->,very thick] (box1.0) -- (ht+1.180);
\draw[->,very thick] (ht-1.90) -- ([yshift=2em]ht-1.90);
\draw[->,very thick] (ht+1.90) -- ([yshift=2em]ht+1.90);
\draw[->,very thick] (box1.90) -- ([yshift=2em]box1.90);
\draw[->,very thick] ([yshift=-2em]ht-1.-90) -- (ht-1.-90);
\draw[->,very thick] ([yshift=-2em]ht+1.-90) -- (ht+1.-90);
\draw[->,very thick] ([yshift=-2em]box1.-90) -- (box1.-90);
\end{scope}
\end{tikzpicture}
\ No newline at end of file
@@ -50,7 +50,7 @@
% NEW SUBSUB-SECTION
%----------------------------------------------------------------------------------------
-\subsubsection{1. 位置编码}
+\subsubsection{1. 位置编码}\label{subsubsec-15.1.1}
\parinterval 对于一个序列,位置编码是在描述对不同位置的偏置。常见的技术手段除了可以在初始的输入中加入统计的偏置信息,如正余弦函数\upcite{vaswani2017attention}, 也可以在计算每层的注意力权重时加入偏置信息。在介绍相对位置编码之前,首先简要回顾一下自注意力机制的计算流程(见{\chaptertwelve})。
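For reference, the sinusoidal encoding cited above \upcite{vaswani2017attention} adds the following bias to the input embedding at position $pos$, where $i$ indexes the dimension and $d_{\textrm{model}}$ is the model width; this is the standard formulation, quoted here only as a reminder:

\begin{eqnarray}
\textrm{PE}(pos,2i) &=& \sin \big(pos / 10000^{2i/d_{\textrm{model}}}\big) \nonumber \\
\textrm{PE}(pos,2i+1) &=& \cos \big(pos / 10000^{2i/d_{\textrm{model}}}\big) \nonumber
\end{eqnarray}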
@@ -257,7 +257,7 @@ C(\mathbi{x}_j \mathbi{W}_K,\omega) &=& (\mathbi{x}_{j-\omega},\ldots,\mathbi{x}
\parinterval 在自然语言处理领域,多分支结构同样也有很多应用。比如,{\chapterten}介绍过的基于循环神经网络的翻译模型中,为了更好地对源语言进行表示,编码端可以采用双向循环神经网络。这里双向循环神经网络就可以看作一个两分支的结构,分别用来建模正向序列和反向序列的表示,之后将这两种表示进行拼接得到更丰富的表示。另一个典型的例子是{\chaptertwelve}介绍的多头注意力机制。在Transformer 模型中,多头注意力将输入空间表示分割成多个独立的表示,然后分别进行点积注意力的计算,最后再将多个输出表示拼接后通过线性变换进行不同子空间信息的融合。在这个过程中,多个不同的头对应着不同的特征空间,可以捕捉到不同的特征信息。
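The multi-head computation described in this paragraph follows the standard formulation below \upcite{vaswani2017attention}, given here only as a reference sketch; $h$ is the number of heads and $\mathbi{W}_i^Q$, $\mathbi{W}_i^K$, $\mathbi{W}_i^V$, $\mathbi{W}^O$ denote the per-head and output projection matrices:

\begin{eqnarray}
\textrm{head}_i &=& \textrm{Attention}(\mathbi{Q}\mathbi{W}_i^Q,\ \mathbi{K}\mathbi{W}_i^K,\ \mathbi{V}\mathbi{W}_i^V) \nonumber \\
\textrm{MultiHead}(\mathbi{Q},\mathbi{K},\mathbi{V}) &=& \textrm{Concat}(\textrm{head}_1,\ldots,\textrm{head}_h)\,\mathbi{W}^O \nonumber
\end{eqnarray}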
-\parinterval 近几年,在Transformer的结构基础上,一些研究探索了更为丰富的多分支结构。下面分别介绍几种在Transformer网络中引入多分支结构的方法:
+\parinterval 近几年,在Transformer的结构基础上,一些研究人员探索了更为丰富的多分支结构。下面分别介绍几种在Transformer网络中引入多分支结构的方法:
\begin{itemize}
\vspace{0.5em}
@@ -1071,12 +1071,12 @@ lr &=& d_{model}^{-0.5}\cdot step\_num^{-0.5}
\begin{figure}[htp]
\centering
\input{./Chapter15/Figures/figure-encoder-structure-of-transformer-model-optimized-by-nas}
-\caption{传统Transformer以及通过网络结构搜索方法优化后的Transformer模型编码器结构({\color{red} 如果图是别的论文的,需要引用!层正则化写全!正则化=层正则化?}}
+\caption{传统Transformer和通过网络结构搜索方法优化后的Transformer({\color{red} 引用论文!}}
\label{fig:15-27}
\end{figure}
%-------------------------------------------
-\parinterval 那么网络结构搜索究竟是一种什么样的技术呢?实际上,人类一直希望能够自动化、快速地解决自己所遇到的问题。机器学习也是为了达到这个目的所产生的技术。如图\ref{fig:15-28}所示,在传统机器学习方法中,研究需要设计大量的特征来描述待解决的问题,即“特征工程”。在深度学习时代,神经网络模型可以完成特征的抽取和学习,但是却需要人工设计神经网络结构,这项工作仍然十分繁重。因此一些科研人员开始思考,能否将设计模型结构的工作也交由机器自动完成?深度学习方法中模型参数能够通过梯度下降等方式进行自动优化,那么模型结构是否可以也看做是一种特殊的参数,使用搜索算法自动找到最适用于当前任务的模型结构?
+\parinterval 那么网络结构搜索究竟是一种什么样的技术呢?实际上,人类一直希望能够自动化、快速地解决自己所遇到的问题。机器学习也是为了达到这个目的所产生的技术。如图\ref{fig:15-28}所示,在传统机器学习方法中,研究人员需要设计大量的特征来描述待解决的问题,即“特征工程”。在深度学习时代,神经网络模型可以完成特征的抽取和学习,但是却需要人工设计神经网络结构,这项工作仍然十分繁重。因此一些科研人员开始思考,能否将设计模型结构的工作也交由机器自动完成?深度学习方法中模型参数能够通过梯度下降等方式进行自动优化,那么模型结构是否可以也看做是一种特殊的参数,使用搜索算法自动找到最适用于当前任务的模型结构?
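The question raised in this paragraph, treating the network structure itself as a searchable object, can be made concrete with a minimal sketch of a generic structure-search loop: sample a candidate from a search space, estimate its performance, feed the result back, and keep the best candidate. This is an illustrative toy under made-up assumptions (the search space and the scoring function below are placeholders), not code from the book or from any NAS library:

```python
import random

# Toy sketch of a generic neural architecture search loop:
# define a search space, repeatedly sample a candidate structure,
# estimate its quality, and keep the best one found.

SEARCH_SPACE = {                          # hypothetical discrete search space
    "cell": ["rnn", "lstm", "gru"],       # candidate recurrent cells
    "hidden_size": [128, 256, 512],
    "num_layers": [1, 2, 3],
}

def sample(space):
    """Search strategy (here: plain random search) proposes one structure."""
    return {name: random.choice(options) for name, options in space.items()}

def evaluate(arch):
    """Performance estimation; a real system would train the candidate model.
    This dummy score is for illustration only."""
    return arch["hidden_size"] * arch["num_layers"] + random.random()

def search(budget=20):
    best_arch, best_score = None, float("-inf")
    for _ in range(budget):
        arch = sample(SEARCH_SPACE)       # sample from the search space
        score = evaluate(arch)            # performance estimation
        if score > best_score:
            best_arch, best_score = arch, score
    return best_arch                      # the found model structure

if __name__ == "__main__":
    print(search())
```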
%----------------------------------------------
\begin{figure}[htp]
@@ -1109,7 +1109,7 @@ lr &=& d_{model}^{-0.5}\cdot step\_num^{-0.5}
%----------------------------------------------
\begin{figure}[htp]
\centering
-\includegraphics[scale=0.22]{./Chapter15/Figures/figure-main-flow-of-neural-network-structure-search.png}
+\input{./Chapter15/Figures/figure-main-flow-of-neural-network-structure-search}
\caption{神经网络结构搜索的主要流程}
\label{fig:15-29}
\end{figure}
...