Commit 61c4d15c by xiaotong

introduce small-footprint backward computation (by using "-smallfootprint")

parent f8c57160
@@ -116,6 +116,7 @@ void T2TTrainer::Init(int argc, char ** argv)
     LoadParamBool(argc, argv, "doubledend", &isDoubledEnd, false);
     LoadParamBool(argc, argv, "smallbatch", &isSmallBatch, true);
     LoadParamBool(argc, argv, "bigbatch", &isBigBatch, false);
+    LoadParamBool(argc, argv, "smallfootprint", &isSmallFootprint, false);
 
     buf = new int[bufSize];
     buf2 = new int[bufSize];
@@ -164,6 +165,9 @@ void T2TTrainer::Train(const char * fn, const char * validFN, const char * model
     XMem * mem = model->mem;
     XNet net;
 
+    if(isSmallFootprint)
+        net.SetGradEfficientFlag();
+
     PrepareModel(model);
 
     double startT = GetClockSec();
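The body of SetGradEfficientFlag is not part of this commit. Conceptually, a gradient-efficient backward pass frees each node's gradient as soon as every consumer of that node has been processed, instead of keeping all gradients alive until the whole backward pass finishes. A minimal sketch of that policy, assuming a node list in reverse topological order; Node, BackwardNode, and BackwardEfficient are illustrative names, not the XNet API:

    #include <cstdlib>
    #include <vector>

    struct Node {
        float * grad;                  /* gradient buffer of this node (heap-allocated) */
        std::vector<Node *> inputs;    /* nodes this node was computed from */
        bool isParam;                  /* parameter gradients must survive for the optimizer */
    };

    /* applies the chain rule for n's operation, accumulating into the
       gradients of n->inputs; stubbed here since the memory policy is the point */
    void BackwardNode(Node * n) { (void)n; }

    /* nodes must be in reverse topological order (outputs first), so that when
       n is visited, every consumer of n has already used n->grad */
    void BackwardEfficient(std::vector<Node *> & nodes, bool gradEfficient)
    {
        for (Node * n : nodes) {
            BackwardNode(n);
            if (gradEfficient && !n->isParam) {
                /* n->grad has been fully propagated to n's inputs; release it
                   now rather than at the end of the whole backward pass */
                free(n->grad);
                n->grad = nullptr;
            }
        }
    }

The trade-off is that gradients cannot be inspected after the fact, which is consistent with this commit also dropping the net.ShowNetwork debug call below.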
@@ -213,8 +217,6 @@ void T2TTrainer::Train(const char * fn, const char * validFN, const char * model
             ShowNTErrors("Illegal model type!");
         }
 
-        net.ShowNetwork(stderr, &output);
-
         /* back-propagation for obtaining gradients */
         if (labelSmoothingP > 0)
             LabelSmooth(&gold, &goldSmoothed, labelSmoothingP);
......
@@ -142,6 +142,9 @@ public:
     /* counterpart of "isSmallBatch" */
     bool isBigBatch;
 
+    /* indicates whether we use a small memory footprint for the backward process */
+    bool isSmallFootprint;
+
 public:
     /* constructor */
     T2TTrainer();
......
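Usage note: given the option name registered in Init above, the feature should be enabled by appending "-smallfootprint" to the existing training command line (the "-" prefix follows the commit message; the rest of the command line is not shown in this commit). The option defaults to off (false).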