Fix the fp16 dtype support for the scale op, delete the fp16 raise test case for scale

Fix the fp16 dtype support for the scale op, and delete the test case that expected fp16 input to raise TypeError.
revert-22778-infer_var_type
wawltor 5 years ago committed by GitHub
parent 648f1d5b3f
commit 17ec3ab23e
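In user-facing terms, the old eager check ran before the dygraph fast path, omitted 'float16', misspelled 'int64' as 'in64', and listed 'uint8' twice, so any float16 input to fluid.layers.scale in static-graph mode raised TypeError. A minimal sketch of the behavior this commit enables, assuming the 1.x fluid API this file targets:

import paddle.fluid as fluid

# Before this commit, the dtype check rejected float16, so this raised
# TypeError during program construction; after it, the op is built normally.
x = fluid.data(name="x", shape=[10], dtype="float16")
out = fluid.layers.scale(x, scale=2.0, bias=1.0)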

@@ -10713,10 +10713,6 @@ def scale(x, scale=1.0, bias=0.0, bias_after_scale=True, act=None, name=None):
     """
-    check_variable_and_dtype(
-        x, "x",
-        ['float32', 'float64', 'uint8', 'int16', 'int32', 'in64', 'uint8'],
-        "scale")
     if in_dygraph_mode():
         _scale = scale.numpy().item(0) if isinstance(scale, Variable) else scale
         out = core.ops.scale(x, 'scale',
@@ -10724,6 +10720,10 @@ def scale(x, scale=1.0, bias=0.0, bias_after_scale=True, act=None, name=None):
                              float(bias), 'bias_after_scale', bias_after_scale)
         return dygraph_utils._append_activation_in_dygraph(out)
+    check_variable_and_dtype(x, "x", [
+        'float16', 'float32', 'float64', 'int8', 'int16', 'int32', 'int64',
+        'uint8'
+    ], "scale")
     inputs = {'X': [x]}
     attrs = {
         'bias': float(bias),
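The check now sits below the dygraph branch, so the Python-side dtype validation only runs when building a static-graph program. A minimal sketch of what the relocated check does (check_variable_and_dtype lives in paddle.fluid.data_feeder in this release line; the bool input is only an illustration):

import paddle.fluid as fluid
from paddle.fluid.data_feeder import check_variable_and_dtype

x = fluid.data(name="x_bool", shape=[10], dtype="bool")
try:
    # 'bool' is not in the allowed list, so this raises TypeError,
    # mirroring how scale now rejects unsupported dtypes.
    check_variable_and_dtype(x, "x", [
        'float16', 'float32', 'float64', 'int8', 'int16', 'int32', 'int64',
        'uint8'
    ], "scale")
except TypeError as e:
    print(e)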

@@ -131,12 +131,6 @@ class TestScaleRaiseError(unittest.TestCase):
         self.assertRaises(TypeError, test_type)
-        def test_dtype():
-            data = fluid.data(shape=[10], dtype="float16", name="input")
-            fluid.layers.scale(data)
-        self.assertRaises(TypeError, test_dtype)
+
+# Add FP16 test
+@unittest.skipIf(not core.is_compiled_with_cuda(),
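The test hunk is truncated after the skipIf decorator. A hypothetical completion in the style of Paddle's OpTest-based FP16 cases (the class name, input shape, and tolerance are assumptions for illustration, not the commit's actual code):

import unittest
import numpy as np
import paddle.fluid.core as core
from op_test import OpTest


# Add FP16 test
@unittest.skipIf(not core.is_compiled_with_cuda(),
                 "core is not compiled with CUDA")
class TestScaleFp16Op(OpTest):  # hypothetical class name
    def setUp(self):
        self.op_type = "scale"
        self.inputs = {'X': np.random.random((10, 10)).astype(np.float16)}
        self.attrs = {'scale': -2.3}
        self.outputs = {
            'Out': self.inputs['X'] * np.float16(self.attrs['scale'])
        }

    def test_check_output(self):
        place = core.CUDAPlace(0)
        if core.is_float16_supported(place):
            # loose tolerance, since fp16 accumulates rounding error
            self.check_output_with_place(place, atol=0.002)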
