Commit 28f66400 authored Jul 18, 2018 by xiaotong
remove the files
parent e6441f30
Showing 2 changed files with 0 additions and 216 deletions (+0 -216)

source/network/XBackwardNode.cpp  +0 -144
source/network/XBackwardNode.h    +0 -72
source/network/XBackwardNode.cpp deleted 100644 → 0
/* NiuTrans.Tensor - an open-source tensor library
* Copyright (C) 2018, Natural Language Processing Lab, Northeastern University.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Created by: XIAO Tong (xiaotong@mail.neu.edu.cn) 2018-07-17
*/
#include "XBackwardNode.h"
#include "../tensor/core/CHeader.h"
#include "../tensor/XName.h"
namespace nts {

/* make gradient tensor for a node */
void XNoder::MakeGrad(XTensor * node)
{
    if(node == NULL)
        return;

    if(!XTensor::IsIdentical(node, node->grad)){
        delete node->grad;
        node->grad = NewTensor(node);
    }
}

/* whether the node is a leaf node (input) */
bool XNoder::IsLeaf(XTensor * node)
{
    if(node == NULL)
        return false;

    return node->income.tailNum == 0;
}

/* whether the node is a root node (output) */
bool XNoder::IsRoot(XTensor * node)
{
    if(node == NULL)
        return false;

    return node->outgo.tailNum == 0;
}

/* whether the node keeps its gradient */
bool XNoder::IsGrad(XTensor * node)
{
    if(node == NULL)
        return false;

    return node->isGrad;
}

/*
compute dE/dx of a node.
Note that the input of the node can be arbitrary tensors.
>> node - node of the network
*/
void XNodeGrad::Compute(XTensor * node)
{
    if(node == NULL || node->visitMark == NODE_FINISHED)
        return;

    if(!XNoder::IsLeaf(node)){
        /* gradient computation for non-leaf nodes (left empty here) */
    }

    node->visitMark = NODE_FINISHED;
}

/* indicates whether the node is for a math operation */
bool XNodeGrad::IsMathOP(XTensor * node)
{
    XLink &income = node->income;
    return (income.typeID & MATH_BASE) != 0;
}

/* compute dE/dx for a math operation, e.g., sum, multiply ... */
void XNodeGrad::ComputeMath(XTensor * node)
{
    CheckNTErrors(node->grad != NULL, "No gradient found!");

    XLink &income = node->income;
    int operID = income.typeID;

    /* c = a + b * \beta
       dE/da = dE/dc
       dE/db = dE/dc * \beta */
    if(operID == MATH_SUM){
        CheckNTErrors(income.tailNum == 2, "Wrong input tensor number for SUM!");

        XTensor * a = income.tails[0];
        XTensor * b = income.tails[1];
        DTYPE beta = income.GetParam(0);

        XNoder::MakeGrad(a);
        XNoder::MakeGrad(b);

        /* dE/da = dE/dc */
        _CopyValues(node->grad, a->grad);

        /* dE/db = dE/dc * \beta */
        if(beta != 1.0F)
            _ScaleAndShift(node->grad, b->grad, beta);
        else
            _CopyValues(node->grad, b->grad);
    }
    /* c = a * b
       dE/da = dE/dc * b
       dE/db = dE/dc * a */
    else if(operID == MATH_MULTIPLY){
        CheckNTErrors(income.tailNum == 2, "Wrong input tensor number for MULTIPLY!");

        XTensor * a = income.tails[0];
        XTensor * b = income.tails[1];

        XNoder::MakeGrad(a);
        XNoder::MakeGrad(b);

        CheckNTErrors(XTensor::IsIdentical(a, b), "Wrong sized input tensors!");

        _Multiply(node->grad, b, a->grad);
        _Multiply(node->grad, a, b->grad);
    }
    else{
        ShowNTErrors("TODO!");
    }
}

}
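As a quick sanity check of the two backward rules implemented above, the standalone sketch below reproduces them with plain float arrays instead of XTensor, so none of the NiuTrans.Tensor calls are involved and all names are illustrative only. For c = a + b * beta it propagates dE/da = dE/dc and dE/db = dE/dc * beta; for element-wise c = a * b it propagates dE/da = dE/dc * b and dE/db = dE/dc * a.

#include <cstdio>

int main()
{
    const int n = 3;
    float a[]  = {1.0F, 2.0F, 3.0F};
    float b[]  = {4.0F, 5.0F, 6.0F};
    float dc[] = {0.1F, 0.2F, 0.3F};   /* incoming gradient dE/dc */
    float da[3], db[3];
    float beta = 2.0F;

    /* c = a + b * beta  =>  dE/da = dE/dc, dE/db = dE/dc * beta */
    for(int i = 0; i < n; i++){
        da[i] = dc[i];
        db[i] = dc[i] * beta;
    }
    printf("SUM:      da[0] = %.2f, db[0] = %.2f\n", da[0], db[0]);

    /* c = a * b (element-wise)  =>  dE/da = dE/dc * b, dE/db = dE/dc * a */
    for(int i = 0; i < n; i++){
        da[i] = dc[i] * b[i];
        db[i] = dc[i] * a[i];
    }
    printf("MULTIPLY: da[0] = %.2f, db[0] = %.2f\n", da[0], db[0]);

    return 0;
}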
source/network/XBackwardNode.h deleted 100644 → 0
/* NiuTrans.Tensor - an open-source tensor library
* Copyright (C) 2018, Natural Language Processing Lab, Northeastern University.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Created by: XIAO Tong (xiaotong@mail.neu.edu.cn) 2018-07-17
*/
#include "../tensor/XTensor.h"
#include "../tensor/function/FHeader.h"
#ifndef __XBACKWARDNODE_H__
#define __XBACKWARDNODE_H__
namespace nts {

#define NODE_UNFINISHED 0
#define NODE_DOING      1
#define NODE_FINISHED   2

/* node management */
class XNoder
{
public:
    /* make gradient tensor for a node */
    static void MakeGrad(XTensor * node);

    /* whether the node is a leaf node (input) */
    static bool IsLeaf(XTensor * node);

    /* whether the node is a root node (output) */
    static bool IsRoot(XTensor * node);

    /* whether the node keeps its gradient */
    static bool IsGrad(XTensor * node);
};

/* this class computes the gradient for each node in the network */
class XNodeGrad
{
public:
    /* compute dE/dx of a node */
    void Compute(XTensor * node);

    /* indicates whether the node is for a math operation */
    bool IsMathOP(XTensor * node);

    /* compute dE/dx for a math operation, e.g., sum, multiply ... */
    void ComputeMath(XTensor * node);
};

}

#endif
\ No newline at end of file
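The NODE_UNFINISHED / NODE_DOING / NODE_FINISHED marks above suggest a depth-first sweep over the computation graph, where Compute() skips any node already marked finished. Below is a minimal sketch of that traversal pattern; the Node struct and its fields are assumptions for illustration, not the library's XTensor/XLink types.

#include <cstdio>
#include <vector>

#define NODE_UNFINISHED 0
#define NODE_DOING      1
#define NODE_FINISHED   2

/* hypothetical stand-in for XTensor: an id, a visit mark,
   and the nodes this one was computed from */
struct Node {
    int id;
    int visitMark;
    std::vector<Node*> inputs;
};

/* visit a node, then its inputs; a node already marked
   NODE_FINISHED is skipped, mirroring XNodeGrad::Compute */
void Backward(Node * node)
{
    if(node == NULL || node->visitMark == NODE_FINISHED)
        return;
    node->visitMark = NODE_DOING;            /* processing has started */
    printf("computing gradient for node %d\n", node->id);
    for(size_t i = 0; i < node->inputs.size(); i++)
        Backward(node->inputs[i]);
    node->visitMark = NODE_FINISHED;         /* gradient done */
}

int main()
{
    Node a = {0, NODE_UNFINISHED, {}};
    Node b = {1, NODE_UNFINISHED, {}};
    Node c = {2, NODE_UNFINISHED, {}};
    c.inputs.push_back(&a);
    c.inputs.push_back(&b);                  /* c = f(a, b) */
    Backward(&c);                            /* visits node 2, then 0 and 1 */
    return 0;
}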