Error message enhancement for Linear, test=develop (#23595)

revert-23830-2.0-beta
zhongpu 5 years ago committed by GitHub
parent c1c8c7e431
commit d40c52403d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -939,6 +939,10 @@ class Linear(layers.Layer):
return dygraph_utils._append_activation_in_dygraph(pre_act,
self._act)
check_variable_and_dtype(input, 'input',
['float16', 'float32', 'float64'], "Linear")
attrs = {
"x_num_col_dims": len(input.shape) - 1,
"y_num_col_dims": 1,

@ -155,6 +155,31 @@ class TestLayer(LayerTest):
self.assertTrue(np.array_equal(static_ret, dy_ret_value))
with self.static_graph():
# The input of Linear must be a Variable.
def test_Variable():
    """Feed a raw numpy ndarray (not a Variable) into Linear.

    In static-graph mode Linear validates its input via
    check_variable_and_dtype, so passing an ndarray must raise
    TypeError (asserted by the caller via assertRaises).
    """
    inp = np.ones([3, 32, 32], dtype='float32')
    linear = nn.Linear(
        32,
        4,
        bias_attr=fluid.initializer.ConstantInitializer(value=1))
    # The call itself is expected to raise; binding the result to an
    # unused local (the original `linear_ret1`) served no purpose.
    linear(inp)
self.assertRaises(TypeError, test_Variable)
# The input dtype of Linear must be float16, float32, or float64;
# float16 can only be used on a GPU place.
def test_type():
    """Feed an int32 ndarray into Linear.

    int32 is not in Linear's accepted dtype list
    (float16/float32/float64), so the dtype check must raise
    TypeError (asserted by the caller via assertRaises).
    """
    inp = np.ones([3, 32, 32], dtype='int32')
    linear = nn.Linear(
        32,
        4,
        bias_attr=fluid.initializer.ConstantInitializer(value=1))
    # The call itself is expected to raise; binding the result to an
    # unused local (the original `linear_ret2`) served no purpose.
    linear(inp)
self.assertRaises(TypeError, test_type)
def test_layer_norm(self):
inp = np.ones([3, 32, 32], dtype='float32')
with self.static_graph():

Loading…
Cancel
Save