Tune the max relative error for the sigmoid op unit test.

revert-3824-remove_grad_op_type
dangqingqing 8 years ago
parent 18dcc1c0bc
commit 5181aefc6b

@ -37,7 +37,7 @@ class SigmoidKernel : public framework::OpKernel {
auto Y = EigenVector<T>::Flatten(*output);
auto place = context.GetEigenDevice<Place>();
Y.device(place) = 1.0 / (1.0 + (-1.0 * X).exp());
Y.device(place) = 1. / (1. + (-X).exp());
}
};

@ -188,10 +188,10 @@ class GradientChecker(unittest.TestCase):
outputs = backward_op.outputs()
out_names = [item for k in outputs for item in outputs[k]]
cpu_grads = self.get_grad(forward_op, backward_op, input_value,
out_names, core.CPUPlace())
gpu_grads = self.get_grad(forward_op, backward_op, input_value,
out_names, core.GPUPlace(0))
cpu_grads = self.__get_gradient(forward_op, backward_op, input_value,
out_names, core.CPUPlace())
gpu_grads = self.__get_gradient(forward_op, backward_op, input_value,
out_names, core.GPUPlace(0))
for c_grad, g_grad, name in itertools.izip(cpu_grads, gpu_grads,
out_names):
@ -277,8 +277,8 @@ class GradientChecker(unittest.TestCase):
check_names = [grad_var_name(name) for name in inputs_to_check]
for place in places:
# get analytical gradients according to different device
analytic_grads = self.get_grad(forward_op, backward_op, input_vars,
check_names, place)
analytic_grads = self.__get_gradient(forward_op, backward_op,
input_vars, check_names, place)
self.__assert_is_close(numeric_grads, analytic_grads, check_names,
max_relative_error,
"Gradient Check On %s" % str(place))

@ -14,14 +14,14 @@ class TestSigmoidOp(unittest.TestCase):
class TestSigmoidGradOp(GradientChecker):
def test_compare_grad(self):
def test_grad(self):
op = create_op("sigmoid")
inputs = {"X": np.random.random((11, 17)).astype("float32")}
inputs = {"X": np.random.uniform(0.1, 1, [11, 17]).astype("float32")}
# compare gpu and cpu results for backward op.
# skip this test if only compiling CPU version.
# this test will be skipped if only the CPU version is compiled.
self.compare_grad(op, inputs)
# check gradients
self.check_grad(op, inputs, set("X"), "Y")
self.check_grad(op, inputs, set("X"), "Y", max_relative_error=0.007)
if __name__ == '__main__':

Loading…
Cancel
Save