From 6c92282e5e2d4517a3aeb00fd8126da494bac6b1 Mon Sep 17 00:00:00 2001
From: jiangjinsheng
Date: Wed, 10 Jun 2020 12:04:27 +0800
Subject: [PATCH] fixed LeakyReLU

---
 mindspore/nn/layer/activation.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/mindspore/nn/layer/activation.py b/mindspore/nn/layer/activation.py
index fe98ca296a..0c4101e88b 100644
--- a/mindspore/nn/layer/activation.py
+++ b/mindspore/nn/layer/activation.py
@@ -249,11 +249,11 @@ class LeakyReLU(Cell):
         self.alpha = alpha
 
     def construct(self, x):
-        alpha = P.Cast()(F.scalar_to_array(self.alpha), P.DType()(x))
-        if alpha <= 1:
-            out = P.Maximum()(alpha * x, x)
+        alpha_array = P.Cast()(F.scalar_to_array(self.alpha), P.DType()(x))
+        if self.alpha <= 1:
+            out = P.Maximum()(alpha_array * x, x)
         else:
-            out = P.Minimum()(alpha * x, x)
+            out = P.Minimum()(alpha_array * x, x)
         return out
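
As the diff suggests, the fix branches on the Python scalar `self.alpha` rather than on `alpha`, the tensor produced by `Cast`; since `construct` is compiled into a graph, comparing the scalar resolves the `<= 1` condition at graph-construction time instead of turning it into a tensor comparison. Below is a minimal usage sketch of the patched layer, assuming a MindSpore build matching this patch where `nn.LeakyReLU` accepts the `alpha` argument shown above; the expected values are computed by hand from max(alpha * x, x):

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor

    # With alpha <= 1 the patched branch computes max(alpha * x, x):
    # positive entries pass through, negative entries are scaled by alpha.
    leaky_relu = nn.LeakyReLU(alpha=0.2)
    x = Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -0.5, 9.0]], dtype=np.float32))
    print(leaky_relu(x))
    # Expected output (hand-computed):
    # [[-0.2  4.  -1.6]
    #  [ 2.  -0.1  9. ]]

The `else` branch uses `Minimum` for alpha > 1 because the ordering of alpha * x and x flips: min(alpha * x, x) then yields the same semantics, with negative entries scaled by alpha and positive entries passed through unchanged.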