单韦乔 / Toy-MT-Introduction / Commits / 75f5a627

Commit 75f5a627 authored Sep 26, 2019 by xiaotong
new pages
parent bac2deab
Showing 2 changed files with 218 additions and 37 deletions (+218 -37)
Section05-Neural-Networks-and-Language-Modeling/section05-test.tex (+60 -30)
Section05-Neural-Networks-and-Language-Modeling/section05.tex (+158 -7)
Section05-Neural-Networks-and-Language-Modeling/section05-test.tex
View file @ 75f5a627
...
...
@@ -26,6 +26,7 @@
\usetikzlibrary{matrix}
\usetikzlibrary{arrows,decorations.pathreplacing}
\usetikzlibrary{shadows}
% LATEX and plain TEX when using Tik Z
\usetikzlibrary{shadows.blur}
\usepgflibrary{arrows}
% LATEX and plain TEX and pure pgf
\usetikzlibrary{arrows}
% LATEX and plain TEX when using Tik Z
...
...
@@ -115,51 +116,80 @@
\newcounter{mycount4}
%%%------------------------------------------------------------------------------------------------------------
%%% Defining XTensor
\begin{frame}{Defining XTensor}
%%% Building a neural network with XTensor
\begin{frame}{Building a Neural Network}
\begin{itemize}
\item A tensor is represented by the class XTensor and defined with InitTensor; the arguments are:
\begin{itemize}
\item a pointer to an XTensor variable
\item the order of the tensor
\item the size of each dimension (dimensions follow the same convention as ordinary multi-dimensional arrays)
\item the data type of the tensor, etc. (with default values)
\end{itemize}
\item A single-layer network can be built very conveniently
\end{itemize}
\vspace{-0.4em}
\begin{tcolorbox}[enhanced,frame engine=empty,boxrule=0.1mm,size=title,colback=blue!10!white]
\begin{flushleft}
{\scriptsize
\begin{tcolorbox}[bicolor,sidebyside,righthand width=4cm,size=title,frame engine=empty,
colback=blue!10!white,colbacklower=black!5!white]
{\scriptsize
\begin{tabbing}
\texttt{XTensor tensor;} \hspace{12em} \= // declare a tensor named tensor \\
\texttt{int sizes[6] = \{2,3,4,2,3,4\};} \> // the shape of the tensor is 2*3*4*2*3*4 \\
\texttt{InitTensor(\&tensor, 6, sizes, X\_FLOAT);} \> // define an order-6 tensor with shape sizes
\texttt{XTensor x, y, w, b;} \\
\texttt{InitTensor3D(\&x, 3, 4, 5);} \\
\texttt{InitTensor2D(\&w, 5, 3);} \\
\texttt{InitTensor1D(\&b, 3);} \\
\texttt{...} \\
\texttt{y = Sigmoid(MMul(x, w) + b);}
\end{tabbing}
}
\end{flushleft}
\tcblower
\begin{center}
\begin{tikzpicture}
\node [draw,circle,inner sep=2pt,fill=red!30!white,blur shadow={shadow xshift=1pt,shadow yshift=-1pt}] (x) at (0,0) {\footnotesize{$\textrm{x}$}};
\node [anchor=south,draw,rounded corners,inner sep=2pt,minimum width=4em,fill=green!30!white,blur shadow={shadow xshift=1pt,shadow yshift=-1pt}] (layer) at ([yshift=0.7em]x.north) {\scriptsize{layer}};
\node [anchor=south,draw,circle,inner sep=2pt,fill=red!30!white,blur shadow={shadow xshift=1pt,shadow yshift=-1pt}] (y) at ([yshift=0.7em]layer.north) {\scriptsize{$\textrm{y}$}};
\draw [thick,->] (x.north) -- (layer.south);
\draw [thick,->] (layer.north) -- (y.south);
\node [anchor=west,align=left] (xshape) at (x.east) {\tiny{shape: 3*4*5}};
\node [anchor=west,align=left] (yshape) at (y.east) {\tiny{shape: 3*4*3}};
\end{tikzpicture}
\end{center}
\end{tcolorbox}
\visible<2->{
\begin{itemize}
\item A more convenient way of defining tensors
\item A multi-layer network
\end{itemize}
\vspace{-0.4em}
\begin{tcolorbox}[enhanced,frame engine=empty,boxrule=0.1mm,size=title,colback=blue!10!white]
\begin{flushleft}
{\scriptsize
\begin{tcolorbox}[bicolor,sidebyside,righthand width=4cm,size=title,frame engine=empty,
colback=blue!10!white,colbacklower=black!5!white]
{\scriptsize
\begin{tabbing}
\texttt{XTensor a, b, c, d, e;} \hspace{8.5em} \= // declare tensors \\
\texttt{InitTensor1D(\&a, 10, X\_FLOAT);} \> // a 10-dimensional float vector \\
\texttt{InitTensor1D(\&b, 10);} \> // a 10-dimensional vector, default type (float) \\
\texttt{InitTensor2D(\&c, 10, 20);} \> // a 10*20 matrix (float by default) \\
\texttt{InitTensor3D(\&d, 10, 20, 30, X\_INT);} \> // a 10*20*30 order-3 integer tensor \\
\texttt{InitTensor4D(\&e, 10, 20, 30, 40);} \> // a 10*20*30*40 order-4 tensor \\
\> // (float by default)
\texttt{XTensor x, y, h1, h2;} \\
\texttt{XTensor w1, b1, w2, w3;} \\
\texttt{InitTensor3D(\&x, 3, 4, 5);} \\
\texttt{InitTensor2D(\&w1, 5, 3);} \\
\texttt{InitTensor1D(\&b1, 3);} \\
\texttt{InitTensor2D(\&w2, 3, 6);} \\
\texttt{InitTensor2D(\&w3, 6, 4);} \\
\texttt{...} \\
\texttt{h1 = Sigmoid(MMul(x, w1) + b1);} \\
\texttt{h2 = HardTanH(MMul(h1, w2));} \\
\texttt{y = Relu(MMul(h2, w3));}
\end{tabbing}
}
\end{flushleft}
\tcblower
\begin{center}
\begin{tikzpicture}
\node [draw,circle,inner sep=2pt,fill=red!30!white,blur shadow={shadow xshift=1pt,shadow yshift=-1pt}] (x) at (0,0) {\footnotesize{$\textrm{x}$}};
\node [anchor=south,draw,rounded corners,inner sep=2pt,minimum width=4em,fill=green!30!white,blur shadow={shadow xshift=1pt,shadow yshift=-1pt}] (layer1) at ([yshift=0.7em]x.north) {\scriptsize{layer1}};
\node [anchor=south,draw,rounded corners,inner sep=2pt,minimum width=4em,fill=green!30!white,blur shadow={shadow xshift=1pt,shadow yshift=-1pt}] (layer2) at ([yshift=1.0em]layer1.north) {\scriptsize{layer2}};
\node [anchor=south,draw,rounded corners,inner sep=2pt,minimum width=4em,fill=green!30!white,blur shadow={shadow xshift=1pt,shadow yshift=-1pt}] (layer3) at ([yshift=1.0em]layer2.north) {\scriptsize{layer3}};
\node [anchor=south,draw,circle,inner sep=2pt,fill=red!30!white,blur shadow={shadow xshift=1pt,shadow yshift=-1pt}] (y) at ([yshift=0.7em]layer3.north) {\scriptsize{$\textrm{y}$}};
\draw [thick,->] (x.north) -- (layer1.south);
\draw [thick,->] (layer1.north) -- (layer2.south);
\draw [thick,->] (layer2.north) -- (layer3.south);
\draw [thick,->] (layer3.north) -- (y.south);
\node [anchor=west,align=left] (xshape) at (x.east) {\tiny{shape: 3*4*5}};
\node [anchor=west,align=left] (yshape) at (y.east) {\tiny{shape: 3*4*4}};
\node [anchor=south west,align=left,inner sep=2pt] (l1shape) at (layer1.north) {\tiny{shape: 3*4*3}};
\node [anchor=south west,align=left,inner sep=2pt] (l2shape) at (layer2.north) {\tiny{shape: 3*4*6}};
\end{tikzpicture}
\end{center}
\end{tcolorbox}
}
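% Editor's note (added sketch, not part of the original commit): the shapes in the diagram follow
% from the code above, assuming MMul contracts the last dimension of its left argument with the
% first dimension of its right argument and keeps the remaining leading dimensions:
%   x: 3*4*5,  w1: 5*3  ->  h1 = Sigmoid(MMul(x, w1) + b1): 3*4*3
%   h1: 3*4*3, w2: 3*6  ->  h2 = HardTanH(MMul(h1, w2)):    3*4*6
%   h2: 3*4*6, w3: 6*4  ->  y  = Relu(MMul(h2, w3)):        3*4*4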
...
...
Section05-Neural-Networks-and-Language-Modeling/section05.tex
View file @ 75f5a627
...
...
@@ -2127,10 +2127,10 @@ cycle}
\node [anchor=north] (xlabel) at (0,-1.2) {$\textbf{w}$};
}
\visible<3>{
\draw [->,thick] (-1.5in+2em+1.5em,-0.3) .. controls +(east:2) and +(west:1) .. (-0.55,0.8) node [pos=0.5,left] {\scriptsize{\textbf{matrix multiplication}}};
}
\visible<4>{
\draw [->,thick] (-1.5in+2em+1.0em,-0.5) .. controls +(east:2) and +(west:1) .. (-0.55,0.8) node [pos=0.5,left] {\scriptsize{\textbf{matrix multiplication}}};
}
\visible<5>{
\draw [->,thick] (-1.5in+2em+0.5em,-0.7) .. controls +(east:2.5) and +(west:1) .. (-0.55,0.8) node [pos=0.5,left] {\scriptsize{\textbf{matrix multiplication}}};
}
\visible<6->{
\draw [->,thick] (-1.5in+2em,-0.9) .. controls +(east:3) and +(west:1) .. (-0.55,0.8) node [pos=0.5,left] {\scriptsize{\textbf{matrix multiplication}}};
}
\visible<3>{
\draw [->,thick,dashed] (-1.5in+2em+1.5em,-0.3) .. controls +(east:2) and +(west:1) .. (-0.55,0.8) node [pos=0.5,left] {\scriptsize{\textbf{matrix multiplication}}};
}
\visible<4>{
\draw [->,thick,dashed] (-1.5in+2em+1.0em,-0.5) .. controls +(east:2) and +(west:1) .. (-0.55,0.8) node [pos=0.5,left] {\scriptsize{\textbf{matrix multiplication}}};
}
\visible<5>{
\draw [->,thick,dashed] (-1.5in+2em+0.5em,-0.7) .. controls +(east:2.5) and +(west:1) .. (-0.55,0.8) node [pos=0.5,left] {\scriptsize{\textbf{matrix multiplication}}};
}
\visible<6->{
\draw [->,thick,dashed] (-1.5in+2em,-0.9) .. controls +(east:3) and +(west:1) .. (-0.55,0.8) node [pos=0.5,left] {\scriptsize{\textbf{matrix multiplication}}};
}
\end{scope}
\begin{scope}[yshift=6.5em,xshift=1em+3in]
...
...
@@ -2318,7 +2318,7 @@ cycle}
\ \ \ \ \texttt{InitTensor2D(\&tensor, 2, 2, X\_FLOAT);} \> // define the tensor as a 2*2 matrix \\
\ \ \ \ \texttt{tensor.SetDataRand();} \> // initialize the tensor with a uniform distribution over [0,1] \\
\ \ \ \ \texttt{tensor.SetDataRand();} \> // initialize the tensor with the [0,1] uniform distribution \\
\ \ \ \ \texttt{tensor.Dump(stdout);} \> // print the contents of the tensor \\
...
...
@@ -2331,7 +2331,7 @@ cycle}
\end{tcolorbox}
\begin{itemize}
\item<2-> Running this program, you will see the value of each element of the tensor
\item<2-> Running this program will display the value of each element of the tensor
\end{itemize}
\visible<2->{
...
...
@@ -2349,11 +2349,162 @@ cycle}
\vspace{-0.5em}
\begin{itemize}
\item<2-> You can also see that it is an order-2 tensor (order=2), its shape is $2 \times 2$ (dimsize=2,2), its data type is single-precision float (dtype=X\_FLOAT), and it is a dense, non-sparse tensor (dense=1.000)
\item<2-> You can also learn that it is an order-2 tensor (order=2), its shape is $2 \times 2$ (dimsize=2,2), its data type is single-precision float (dtype=X\_FLOAT), and it is a dense, non-sparse tensor (dense=1.000)
\end{itemize}
\end{frame}
%%%------------------------------------------------------------------------------------------------------------
%%% Defining XTensor
\begin{frame}{Defining XTensor}
\begin{itemize}
\item A tensor is represented by the class XTensor and defined with InitTensor; the arguments are:
\begin{itemize}
\item a pointer to an XTensor variable
\item the order of the tensor
\item the size of each dimension (same convention as ordinary multi-dimensional arrays)
\item the data type of the tensor, etc. (with default values)
\end{itemize}
\end{itemize}
\vspace{-0.0em}
\begin{tcolorbox}[enhanced,frame engine=empty,boxrule=0.1mm,size=title,colback=blue!10!white]
\begin{flushleft}
{\scriptsize
\begin{tabbing}
\texttt{XTensor tensor;} \hspace{12em} \= // declare a tensor named tensor \\
\texttt{int sizes[6] = \{2,3,4,2,3,4\};} \> // the shape of the tensor is 2*3*4*2*3*4 \\
\texttt{InitTensor(\&tensor, 6, sizes, X\_FLOAT);} \> // define an order-6 tensor with shape sizes
\end{tabbing}
}
\end{flushleft}
\end{tcolorbox}
\visible<2->{
\begin{itemize}
\item A more convenient way of defining tensors
\end{itemize}
\vspace{-0.2em}
\begin{tcolorbox}[enhanced,frame engine=empty,boxrule=0.1mm,size=title,colback=blue!10!white]
\begin{flushleft}
{\scriptsize
\begin{tabbing}
\texttt{XTensor a, b, c;} \hspace{11.5em} \= // declare tensors \\
\texttt{InitTensor1D(\&a, 10, X\_INT);} \> // a 10-dimensional integer vector \\
\texttt{InitTensor1D(\&b, 10);} \> // a 10-dimensional vector, default type (float) \\
\texttt{InitTensor4D(\&c, 10, 20, 30, 40);} \> // a 10*20*30*40 order-4 tensor (float)
\end{tabbing}
}
\end{flushleft}
\end{tcolorbox}
}
\visible<3->{
\begin{itemize}
\item Defining a tensor directly on the GPU
\end{itemize}
\vspace{-0.2em}
\begin{tcolorbox}[enhanced,frame engine=empty,boxrule=0.1mm,size=title,colback=blue!10!white]
\begin{flushleft}
{\scriptsize
\begin{tabbing}
\texttt{XTensor tensorGPU;} \hspace{10.5em} \= // declare the tensor tensorGPU \\
\texttt{InitTensor2D(\&tensorGPU, 10, 20,} $\backslash$ \> // define the tensor on GPU number 0 \\
\hspace{6.7em} \texttt{X\_FLOAT, 0);}
\end{tabbing}
}
\end{flushleft}
\end{tcolorbox}
}
\end{frame}
%%%------------------------------------------------------------------------------------------------------------
%%% Algebraic operations on XTensor
\begin{frame}{Algebraic Operations}
\begin{itemize}
\item Various element-wise operators (order-1 operations): +, -, *, $\backslash$, Log, Exp, Power, Absolute, etc., as well as activation functions such as Sigmoid and Softmax
\end{itemize}
\vspace{-0.2em}
\begin{tcolorbox}[enhanced,frame engine=empty,boxrule=0.1mm,size=title,colback=blue!10!white]
\begin{flushleft}
{\scriptsize
\begin{tabbing}
\texttt{XTensor a, b, c, d, e;} \hspace{7em} \= // declare tensors \\
\texttt{InitTensor3D(\&a, 2, 3, 4);} \> // a is a 2*3*4 order-3 tensor \\
\texttt{InitTensor3D(\&b, 2, 3, 4);} \> // b is a 2*3*4 order-3 tensor \\
\texttt{InitTensor3D(\&c, 2, 3, 4);} \> // c is a 2*3*4 order-3 tensor \\
\texttt{a.SetDataRand();} \> // randomly initialize a \\
\texttt{b.SetDataRand();} \> // randomly initialize b \\
\texttt{c.SetDataRand();} \> // randomly initialize c \\
\texttt{d = a + b * c;} \> // d is assigned a + b * c \\
\texttt{d = ((a + b) * d - b / c ) * d;} \> // expressions can be nested \\
\texttt{e = Sigmoid(d);} \> // e is d passed through the Sigmoid activation
\end{tabbing}
}
\end{flushleft}
\end{tcolorbox}
\visible<2->{
\begin{itemize}
\item Higher-order operations; the most commonly used is matrix multiplication (MMul)
\end{itemize}
\vspace{-0.2em}
\begin{tcolorbox}[enhanced,frame engine=empty,boxrule=0.1mm,size=title,colback=blue!10!white]
\begin{flushleft}
{\scriptsize
\begin{tabbing}
\texttt{XTensor a, b, c;} \hspace{10.0em} \= // declare tensors \\
\texttt{InitTensor4D(\&a, 2, 2, 3, 4);} \> // a is a 2*2*3*4 order-4 tensor \\
\texttt{InitTensor2D(\&b, 4, 5);} \> // b is a 4*5 matrix \\
\texttt{a.SetDataRand();} \> // randomly initialize a \\
\texttt{b.SetDataRand();} \> // randomly initialize b \\
\texttt{c = MMul(a, b);} \> // the result of the matrix product is a 2*2*3*5 order-4 tensor
\end{tabbing}
}
\end{flushleft}
\end{tcolorbox}
}
\end{frame}
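% Editor's note (added sketch, not part of the original commit): the 2*2*3*5 result above is consistent
% with MMul contracting the last dimension of its left argument with the first dimension of its right
% argument while keeping the remaining leading dimensions (assuming this batched matrix-product behaviour):
%   a: 2*2*3*4,  b: 4*5  ->  MMul(a, b): 2*2*3*5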
%%%------------------------------------------------------------------------------------------------------------
%%% Other XTensor functions
\begin{frame}{Other Commonly Used Functions}
\begin{itemize}
\item Other functions; this list is not exhaustive, see the detailed documentation on the website
\end{itemize}
\footnotesize{
\begin{center}
\begin{tabular}{l|l}
Function & Description \\
\hline
\texttt{a.Reshape(o, s)} & reshape a into a tensor of order o and shape s \\
\texttt{a.Get(pos)} & get the element of the tensor at position pos \\
\texttt{a.Set(v, pos)} & set the element of the tensor at position pos to the value v \\
\texttt{a.Dump(file)} & write the tensor to file, where file is a file handle \\
\texttt{a.Read(file)} & read the tensor from file, where file is a file handle \\
\hline
\texttt{Power(a, p)} & compute the power $\textrm{a}^{\textrm{p}}$ \\
\texttt{Linear(a, s, b)} & compute a * s + b, where s and b are scalars \\
\texttt{CopyValues(a)} & build a copy of a \\
\texttt{ReduceMax(a, d)} & reduce a along direction d, taking the maximum \\
\texttt{ReduceSum(a, d)} & reduce a along direction d, taking the sum \\
\texttt{Concatenate(a, b, d)} & concatenate the tensors a and b along direction d \\
\texttt{Merge(a, d)} & merge tensor a along direction d \\
\texttt{Split(a, d, n)} & split tensor a into n parts along direction d \\
\hline
\texttt{Sigmoid(a)} & apply the Sigmoid transformation to a \\
\texttt{Softmax(a)} & apply the Softmax transformation to a, along the last direction \\
\texttt{HardTanH(a)} & apply the hard tanh transformation (an approximation of tanh) to a \\
\texttt{Relu(a)} & apply the ReLU transformation to a \\
\end{tabular}
\end{center}
}
\end{frame}
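% Editor's note (added sketch, not part of the original commit): a small example of how a few of the
% functions listed above might be combined, assuming only the signatures shown in the table; the exact
% NiuTensor API may differ in detail.
\begin{tcolorbox}[enhanced,frame engine=empty,boxrule=0.1mm,size=title,colback=blue!10!white]
\begin{flushleft}
{\scriptsize
\begin{tabbing}
\texttt{XTensor a, b, s, m, c;} \hspace{8.5em} \= // declare tensors \\
\texttt{InitTensor2D(\&a, 4, 5);} \> // a is a 4*5 matrix \\
\texttt{InitTensor2D(\&b, 4, 5);} \> // b is a 4*5 matrix \\
\texttt{a.SetDataRand();} \> // randomly initialize a \\
\texttt{b.SetDataRand();} \> // randomly initialize b \\
\texttt{s = ReduceSum(a, 1);} \> // sum a along direction 1 \\
\texttt{m = ReduceMax(a, 1);} \> // maximum of a along direction 1 \\
\texttt{c = Concatenate(a, b, 0);} \> // concatenate a and b along direction 0, giving an 8*5 matrix \\
\texttt{c = Linear(c, 2.0, 1.0);} \> // compute c * 2.0 + 1.0 \\
\texttt{c.Dump(stdout);} \> // print the contents of c
\end{tabbing}
}
\end{flushleft}
\end{tcolorbox}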
%%%------------------------------------------------------------------------------------------------------------
\subsection{Parameter Learning: Back-Propagation}
...
...