diff --git a/paddle/operators/math/activation_functor.h b/paddle/operators/math/activation_functor.h
index 7e15607f46..1e9bdd142e 100644
--- a/paddle/operators/math/activation_functor.h
+++ b/paddle/operators/math/activation_functor.h
@@ -61,9 +61,10 @@ struct ExpGrad {
                   const framework::Tensor& X, const framework::Tensor& Y,
                   const framework::Tensor& dY, framework::Tensor* dX) {
     auto dx = framework::EigenVector<T>::Flatten(*dX);
     auto dy = framework::EigenVector<T>::Flatten(dY);
+    auto y = framework::EigenVector<T>::Flatten(Y);
     auto* place = device_context.template get_eigen_device<Place>();
-    dx.device(*place) = dy.exp();
+    dx.device(*place) = dy * y;
   }
 };
 
diff --git a/python/paddle/v2/framework/tests/test_relu_op.py b/python/paddle/v2/framework/tests/test_relu_op.py
index 07b7113d79..58a0872db4 100644
--- a/python/paddle/v2/framework/tests/test_relu_op.py
+++ b/python/paddle/v2/framework/tests/test_relu_op.py
@@ -3,9 +3,9 @@ import numpy as np
 from op_test import OpTest
 
 
-class TestExp(OpTest):
+class TestRelu(OpTest):
     def setUp(self):
-        self.op_type = "exp"
+        self.op_type = "relu"
         self.inputs = {
             'X': np.random.uniform(-1, 1, [11, 17]).astype("float32")
         }