@@ -59,24 +59,14 @@ void BatchNormalizationLayer::calMeanAndStd(const MatrixPtr& mat) {
 
 void BatchNormalizationLayer::calMovingMeanAndVar() {
   // calculating and saving moving mean and variance
-  MatrixPtr movingMean = movingMean_->getW();
-  MatrixPtr movingVar = movingVar_->getW();
-
-  if (!useGpu_ && FLAGS_trainer_count > 1) {
-    auto mvMean = std::dynamic_pointer_cast<SharedCpuMatrix>(movingMean);
-    auto mvVar = std::dynamic_pointer_cast<SharedCpuMatrix>(movingVar);
-    CHECK(mvMean && mvVar);
-
-    mvMean->add(*savedMean_, movingAvgFraction_, 1.0 - movingAvgFraction_);
-    mvVar->add(*savedInvVar_, movingAvgFraction_, 1.0 - movingAvgFraction_);
-  } else {
-    // movingMean = movingMean * movingAvgFraction_
-    //            + savedMean_ * (1 - movingAvgFraction_)
-    movingMean->add(*savedMean_, movingAvgFraction_, 1.0 - movingAvgFraction_);
-    // movingVar = movingVar * movingAvgFraction_
-    //            + savedInvVar_ * (1 - movingAvgFraction_)
-    movingVar->add(*savedInvVar_, movingAvgFraction_, 1.0 - movingAvgFraction_);
-  }
+  auto& movingMean = movingMean_->getW();
+  auto& movingVar = movingVar_->getW();
+  // movingMean = movingMean * movingAvgFraction_
+  //            + savedMean_ * (1 - movingAvgFraction_)
+  movingMean->add(*savedMean_, movingAvgFraction_, 1.0 - movingAvgFraction_);
+  // movingVar = movingVar * movingAvgFraction_
+  //            + savedInvVar_ * (1 - movingAvgFraction_)
+  movingVar->add(*savedInvVar_, movingAvgFraction_, 1.0 - movingAvgFraction_);
 }
 
 void BatchNormalizationLayer::setMeanAndStd() {
0 commit comments