[Dy2Static]Enhance check of TracedLayers out vars (#30576)

revert-31068-fix_conv3d_windows
Aurelius84 5 years ago committed by GitHub
parent d1b25ed9d7
commit 5067e3a8d2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@@ -53,21 +53,21 @@ def create_program_from_desc(program_desc):
     return program


-def _extract_vars(inputs, result_list):
+def _extract_vars(inputs, result_list, err_tag='inputs'):
     if isinstance(inputs, Variable):
         result_list.append(inputs)
     elif isinstance(inputs, (list, tuple)):
         for var in inputs:
-            _extract_vars(var, result_list)
+            _extract_vars(var, result_list, err_tag)
     else:
         raise TypeError(
-            "The type of 'each element of inputs' in fluid.dygraph.jit.TracedLayer.trace must be fluid.Variable, but received {}.".
-            format(type(inputs)))
+            "The type of 'each element of {}' in fluid.dygraph.jit.TracedLayer.trace must be fluid.Variable, but received {}.".
+            format(err_tag, type(inputs)))


-def extract_vars(inputs):
+def extract_vars(inputs, err_tag='inputs'):
     result_list = []
-    _extract_vars(inputs, result_list)
+    _extract_vars(inputs, result_list, err_tag)
     return result_list
@@ -1032,7 +1032,7 @@ def _trace(layer,
         outputs = [original_outputs]
     else:
         outputs = original_outputs
-    out_vars = [var for var in outputs]
+    out_vars = extract_vars(outputs, err_tag='outputs')

     program_desc, feed_names, fetch_names, parameters = tracer.create_program_desc(
         var_list, feed_prefix, out_vars, fetch_prefix, tmp_prefix)

@@ -13,16 +13,18 @@
 # limitations under the License.

 import numpy as np
+import paddle
 import paddle.fluid as fluid
 import six
 import unittest
+import paddle.nn as nn


-class SimpleFCLayer(fluid.dygraph.Layer):
+class SimpleFCLayer(nn.Layer):
     def __init__(self, feature_size, batch_size, fc_size):
         super(SimpleFCLayer, self).__init__()
-        self._linear = fluid.dygraph.Linear(feature_size, fc_size)
-        self._offset = fluid.dygraph.to_variable(
+        self._linear = nn.Linear(feature_size, fc_size)
+        self._offset = paddle.to_tensor(
             np.random.random((batch_size, fc_size)).astype('float32'))

     def forward(self, x):
@@ -30,6 +32,17 @@ class SimpleFCLayer(fluid.dygraph.Layer):
         return fc + self._offset


+class LinearNetWithNone(nn.Layer):
+    def __init__(self, feature_size, fc_size):
+        super(LinearNetWithNone, self).__init__()
+        self._linear = nn.Linear(feature_size, fc_size)
+
+    def forward(self, x):
+        fc = self._linear(x)
+        return [fc, [None, 2]]
+
+
 class TestTracedLayerErrMsg(unittest.TestCase):
     def setUp(self):
         self.batch_size = 4
@@ -152,5 +165,14 @@ class TestTracedLayerErrMsg(unittest.TestCase):
         return layer


+class TestOutVarWithNoneErrMsg(unittest.TestCase):
+    def test_linear_net_with_none(self):
+        model = LinearNetWithNone(100, 16)
+        in_x = paddle.to_tensor(np.random.random((4, 100)).astype('float32'))
+        with self.assertRaises(TypeError):
+            dygraph_out, traced_layer = fluid.dygraph.TracedLayer.trace(model,
+                                                                        [in_x])
+
+
 if __name__ == '__main__':
     unittest.main()

Loading…
Cancel
Save