From 1741721240ea3fbef262543612559c246ba7082a Mon Sep 17 00:00:00 2001
From: chenfei
Date: Wed, 16 Sep 2020 21:06:05 +0800
Subject: [PATCH] rm arg 'after_fake' of Conv2dBnAct

---
 mindspore/nn/layer/quant.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/mindspore/nn/layer/quant.py b/mindspore/nn/layer/quant.py
index 332423a1d9..412c659b2a 100644
--- a/mindspore/nn/layer/quant.py
+++ b/mindspore/nn/layer/quant.py
@@ -85,9 +85,14 @@ class Conv2dBnAct(Cell):
             Initializer and string are the same as 'weight_init'. Refer to the values of
             Initializer for more details. Default: 'zeros'.
         has_bn (bool): Specifies to used batchnorm or not. Default: False.
+        momentum (float): Momentum for the moving average. The momentum value must be in the range [0, 1]. Default: 0.9.
+        eps (float): Term added to the denominator to improve numerical stability. Should be greater than 0.
+            Default: 1e-5.
         activation (Cell): Specifies activation type. The optional values are as following:
             'softmax', 'logsoftmax', 'relu', 'relu6', 'tanh', 'gelu', 'sigmoid',
             'prelu', 'leakyrelu', 'hswish', 'hsigmoid'. Default: None.
+        alpha (float): Slope of the activation function at x < 0. Default: 0.2.
+        after_fake (bool): Determine whether there should be a fake quantization operation after Conv2dBnAct.
 
     Inputs:
         - **input** (Tensor) - Tensor of shape :math:`(N, C_{in}, H_{in}, W_{in})`.
@@ -171,6 +176,7 @@ class DenseBnAct(Cell):
         activation (string): Specifies activation type. The optional values are as following:
             'Softmax', 'LogSoftmax', 'ReLU', 'ReLU6', 'Tanh', 'GELU', 'Sigmoid', 'PReLU',
             'LeakyReLU', 'h-Swish', and 'h-Sigmoid'. Default: None.
+        after_fake (bool): Determine whether there should be a fake quantization operation after DenseBnAct.
 
     Inputs:
         - **input** (Tensor) - Tensor of shape :math:`(N, in\_channels)`.
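
For reference, a minimal usage sketch of the arguments documented above. This assumes the Conv2dBnAct and DenseBnAct constructors accept these keywords as documented in the patch, and that activation names are passed as lowercase strings; shapes and values are illustrative, not taken from the patch:

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor

    # Conv2dBnAct fuses conv + batchnorm (controlled by momentum/eps) + activation.
    # 'leakyrelu' uses `alpha` as the negative-slope parameter.
    conv = nn.Conv2dBnAct(3, 16, kernel_size=3, has_bn=True, momentum=0.9,
                          eps=1e-5, activation='leakyrelu', alpha=0.2)
    x = Tensor(np.ones((1, 3, 32, 32), np.float32))
    print(conv(x).shape)  # (1, 16, 32, 32) with the default 'same' padding

    # DenseBnAct applies the same fused pattern to 2-D inputs of shape (N, in_channels).
    dense = nn.DenseBnAct(32, 10, has_bn=True, activation='relu')
    y = Tensor(np.ones((4, 32), np.float32))
    print(dense(y).shape)  # (4, 10)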