fix some quant op bugs

pull/1739/head
wangdongxu6 5 years ago committed by 王东旭
parent b384106153
commit 9eee157c58

File diff suppressed because it is too large.

@@ -85,7 +85,7 @@ def get_bprop_batchnorm_fold2(self):
 @bprop_getters.register(P.BatchNormFoldD)
 def get_bprop_BatchNormFold(self):
     """Generate bprop for BatchNormFold for Ascend"""
-    op = P.BatchNormFoldGrad_(self.epsilon, self.is_training, self.freeze_bn)
+    op = P.BatchNormFoldGradD(self.epsilon, self.is_training, self.freeze_bn)
 
     def bprop(x, x_sum, x_square_sum, mean, variance, out, dout):
         dx = op(dout[1], dout[2], x, out[1], out[2])

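For reference, a minimal sketch of how the corrected getter reads after this change. The import paths, the zeros_like helper, and the trailing return lines (cut off in the hunk above) are assumptions based on the usual MindSpore bprop pattern, not part of the diff itself.

from mindspore.ops import operations as P
from mindspore.ops._grad.grad_base import bprop_getters
from mindspore.ops.functional import zeros_like

@bprop_getters.register(P.BatchNormFoldD)
def get_bprop_BatchNormFold(self):
    """Generate bprop for BatchNormFold for Ascend"""
    # The fix: build the Ascend grad primitive under its registered name
    # BatchNormFoldGradD instead of the stale BatchNormFoldGrad_ reference.
    op = P.BatchNormFoldGradD(self.epsilon, self.is_training, self.freeze_bn)

    def bprop(x, x_sum, x_square_sum, mean, variance, out, dout):
        dx = op(dout[1], dout[2], x, out[1], out[2])
        # Only x gets a real gradient; the statistics inputs receive zeros
        # (assumed, following the common pattern for fold-style bprops).
        return dx, zeros_like(x_sum), zeros_like(x_square_sum), \
               zeros_like(mean), zeros_like(variance)

    return bprop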
@@ -16,6 +16,7 @@
"""_BatchNormFold op"""
from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType
import te
from te import tvm
from topi import generic
from topi.cce import util
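The te / tvm / topi imports above are the standard TBE kernel tool chain. Below is a minimal, illustrative skeleton (not the repository's actual _BatchNormFold kernel) of how they are typically combined; the shape, dtype, kernel name, and the reduce-sum compute are assumptions for demonstration only.

import te.lang.cce
from te import tvm
from topi import generic
from topi.cce import util

def _example_sum_kernel(shape=(32, 64), dtype="float16", kernel_name="example_sum"):
    """Build a trivial reduce-sum kernel with the te/tvm/topi tool chain."""
    util.check_kernel_name(kernel_name)                  # validate the kernel name
    data = tvm.placeholder(shape, name="data", dtype=dtype)
    res = te.lang.cce.sum(data, axis=0)                  # DSL compute expression
    with tvm.target.cce():                               # schedule for the CCE (Ascend) target
        sch = generic.auto_schedule(res)
    config = {"name": kernel_name, "tensor_list": [data, res]}
    te.lang.cce.cce_build_code(sch, config)              # emit the compiled kernel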

@@ -31,8 +31,11 @@ __all__ = ["FakeQuantWithMinMax",
"BatchNormFold2",
"BatchNormFold2Grad",
"BatchNormFoldD",
"BatchNormFoldGradD",
"BNTrainingReduce",
"BatchNormFold2_D",
"BatchNormFold2GradD",
"BatchNormFold2GradReduce",
"FakeQuantWithMinMaxUpdate",
]
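Each name in this export list corresponds to a TBE op info registration built with the op_info_register / TBERegOp / DataType imports shown earlier. A hedged sketch of what one such registration looks like; the attribute, input/output, and dtype_format entries, the binary file name, and the compute cost are illustrative placeholders rather than the real BatchNormFoldD definition.

from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType

batchnorm_fold_op_info = TBERegOp("BatchNormFold") \
    .fusion_type("OPAQUE") \
    .async_flag(False) \
    .binfile_name("batchnorm_fold.so") \
    .compute_cost(10) \
    .kernel_name("batchnorm_fold") \
    .partial_flag(True) \
    .attr("epsilon", "optional", "float", "all") \
    .input(0, "x", False, "required", "all") \
    .output(0, "y", False, "required", "all") \
    .dtype_format(DataType.F32_Default, DataType.F32_Default) \
    .get_op_info()

@op_info_register(batchnorm_fold_op_info)
def _batchnorm_fold_tbe():
    """BatchNormFold TBE register"""
    return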
