From d8ceb23e62116534d65c577ed4ebff036a2599dd Mon Sep 17 00:00:00 2001
From: zhaojichen
Date: Thu, 17 Sep 2020 10:17:42 +0800
Subject: [PATCH] fix globalbatchnorm bug

---
 mindspore/nn/layer/normalization.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mindspore/nn/layer/normalization.py b/mindspore/nn/layer/normalization.py
index fffc0b8bc1..ed1180b618 100644
--- a/mindspore/nn/layer/normalization.py
+++ b/mindspore/nn/layer/normalization.py
@@ -410,7 +410,7 @@ class GlobalBatchNorm(_BatchNorm):
     Args:
         num_features (int): `C` from an expected input of size (N, C, H, W).
-        device_num_each_group (int): The number of devices in each group. Default: 1.
+        device_num_each_group (int): The number of devices in each group. Default: 2.
         eps (float): A value added to the denominator for numerical stability. Default: 1e-5.
         momentum (float): A floating hyperparameter of the momentum for the
             running_mean and running_var computation. Default: 0.9.
@@ -453,7 +453,7 @@ class GlobalBatchNorm(_BatchNorm):
                  moving_mean_init='zeros',
                  moving_var_init='ones',
                  use_batch_statistics=None,
-                 device_num_each_group=1):
+                 device_num_each_group=2):
         super(GlobalBatchNorm, self).__init__(num_features,
                                               eps,
                                               momentum,
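
Why the default matters: GlobalBatchNorm synchronizes normalization statistics across the devices in each group, so a group size of 1 performs no cross-device reduction and degenerates into an ordinary per-device BatchNorm; raising the default to 2 makes the layer synchronize out of the box. Below is a minimal usage sketch, not part of the patch, assuming a multi-device environment with a collective-communication backend available; the input shape and setup calls are illustrative only.

import numpy as np
import mindspore.nn as nn
from mindspore import Tensor, context
from mindspore.communication.management import init

# Illustrative setup only: GlobalBatchNorm requires graph mode and an
# initialized collective-communication backend (e.g. HCCL on Ascend).
context.set_context(mode=context.GRAPH_MODE)
init()

# With the fixed default, device_num_each_group=2, batch statistics are
# reduced across groups of two devices during training. The old default
# of 1 formed single-device groups, i.e. no synchronization at all.
global_bn = nn.GlobalBatchNorm(num_features=3)

x = Tensor(np.ones([1, 3, 2, 2]).astype(np.float32))
y = global_bn(x)  # normalized with group-synchronized mean/variance

Callers that previously relied on the implicit default of 1 would have silently gotten unsynchronized statistics, which is presumably why the patch changes the default rather than only the docstring.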