From b50a50761760d124aa4a38c81599a1069bc6fbf0 Mon Sep 17 00:00:00 2001
From: qijun <qijun1994@hotmail.com>
Date: Wed, 13 Sep 2017 17:45:11 +0800
Subject: [PATCH] fix exp gradient and add activation operator python test

---
 paddle/operators/math/activation_functor.h       | 3 ++-
 python/paddle/v2/framework/tests/test_relu_op.py | 4 ++--
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/paddle/operators/math/activation_functor.h b/paddle/operators/math/activation_functor.h
index 7e15607f46..1e9bdd142e 100644
--- a/paddle/operators/math/activation_functor.h
+++ b/paddle/operators/math/activation_functor.h
@@ -61,9 +61,10 @@ struct ExpGrad {
                   const framework::Tensor& X, const framework::Tensor& Y,
                   const framework::Tensor& dY, framework::Tensor* dX) {
     auto dx = framework::EigenVector<T>::Flatten(*dX);
     auto dy = framework::EigenVector<T>::Flatten(dY);
+    auto y = framework::EigenVector<T>::Flatten(Y);
     auto* place = device_context.template get_eigen_device<Place>();
-    dx.device(*place) = dy.exp();
+    dx.device(*place) = dy * y;
   }
 };
 
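[Reviewer note, not part of the patch] With Y = exp(X), the chain rule gives
dX = dY * exp(X) = dY * Y, which is why the backward pass above can reuse the
forward output Y instead of re-evaluating exp (and why the upstream gradient
dY must stay in the product). A minimal NumPy sketch of that identity,
checked against a finite difference; all names here are illustrative:

    import numpy as np

    x = np.random.uniform(-1, 1, [11, 17]).astype("float64")
    dy = np.random.uniform(-1, 1, x.shape)

    y = np.exp(x)
    dx = dy * y  # analytic gradient, mirroring ExpGrad above

    # Finite-difference check of d(exp)/dx at one entry.
    eps = 1e-6
    num = (np.exp(x[3, 5] + eps) - np.exp(x[3, 5] - eps)) / (2 * eps)
    assert np.isclose(dx[3, 5], dy[3, 5] * num, rtol=1e-4)
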
diff --git a/python/paddle/v2/framework/tests/test_relu_op.py b/python/paddle/v2/framework/tests/test_relu_op.py
index 07b7113d79..58a0872db4 100644
--- a/python/paddle/v2/framework/tests/test_relu_op.py
+++ b/python/paddle/v2/framework/tests/test_relu_op.py
@@ -3,9 +3,9 @@ import numpy as np
 from op_test import OpTest
 
 
-class TestExp(OpTest):
+class TestRelu(OpTest):
     def setUp(self):
-        self.op_type = "exp"
+        self.op_type = "relu"
         self.inputs = {
             'X': np.random.uniform(-1, 1, [11, 17]).astype("float32")
         }
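[Reviewer note, not part of the patch] The hunk above only renames the test
class and op_type; the expected outputs presumably follow in the unchanged
remainder of the file. For context, a standalone NumPy sketch of the
behavior the relu test exercises (the helpers below are illustrative, not
the file's actual contents):

    import numpy as np

    def relu(x):
        # Forward: Y = max(X, 0), elementwise.
        return np.maximum(x, 0)

    def relu_grad(x, dy):
        # Backward: the gradient passes through only where X > 0.
        return dy * (x > 0).astype(x.dtype)

    x = np.random.uniform(-1, 1, [11, 17]).astype("float32")
    assert (relu_grad(x, np.ones_like(x))[x <= 0] == 0).all()

One caveat with uniform(-1, 1) inputs: they include values near zero, where
relu's gradient is discontinuous, so numeric gradient checks can be flaky
there; similar tests commonly nudge near-zero inputs away from the kink
before checking gradients.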