Commit a84f1cb0 by xiaotong

add maps of parameters and gradients for a group of models

parent ee577b38
......@@ -95,8 +95,6 @@ void TestTrain()
TTModel model;
model.Init(config, serverDevID);
tmpTT = model.params[0].param;
XOptimizer optimizer;
optimizer.Init(config);
......
......@@ -40,11 +40,15 @@ namespace nts {
/* constructor: start with an empty leader - no id assigned, no
   parameter/gradient maps, and zero model copies */
XLeader::XLeader() : id(-1), paramMap(NULL), gradMap(NULL), modelNum(0)
{
}
/* de-constructor: release the parameter and gradient maps
   (if MakeParamMap() built them) before the leader goes away */
XLeader::~XLeader()
{
    DestroyParamMap();
}
/* intialize the leader */
......@@ -402,6 +406,26 @@ bool XLeader::Run(XConfig * config, DataDistributeBase * dataDistributor, XOptim
return activeJobCount > 0;
}
/* destroy the parameter map (and gradient map) */
void XLeader::DestroyParamMap()
{
for(int i = 0; i < modelNum; i++){
delete[] paramMap[i];
delete[] gradMap[i];
}
delete[] paramMap;
delete[] gradMap;
modelNum = 0;
}
/* generate the map of parameters.
   Drops any previously built maps first, then walks the job workers.
   NOTE(review): the loop body is empty in this commit - the per-worker
   construction of paramMap/gradMap entries appears unfinished; confirm
   against a later revision before relying on this method. */
void XLeader::MakeParamMap()
{
    DestroyParamMap();

    for (int i = 0; i < jworkers.count; i++) {
    }
}
/*
run the model
......
......@@ -94,6 +94,19 @@ protected:
parameter workers. So they are actually pipelines
of jobs. */
XList pworkers;
/* map of parameters. (x,y) indexes parameter x of
worker y. Note that a worker keeps a copy of the
parameters and runs back-propagation to obtain the
gradient of the loss with respect to the parameters. */
XTensorKeeper ** paramMap;
/* map of parameter gradients. (x,y) indexes the
gradient of parameter x of worker y. */
XTensorKeeper ** gradMap;
/* number of model copies for paramMap and gradMap */
int modelNum;
public:
/* constructor */
......@@ -161,6 +174,12 @@ public:
/* add a parameter worker (or a pipeline) */
void AddJobParamterWorker(int n);
/* destroy the parameter map (and gradient map) */
void DestroyParamMap();
/* generate the map of parameters */
void MakeParamMap();
/* run the model and update it (for one time) */
bool Run(XConfig * config, DataDistributeBase * dataDistributor, XOptimizer * optimizer);
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论