@@ -792,15 +792,14 @@ class Model(object):
switched by `paddle.disable_static()`. The usage is as follows.
But note, the switching between dynamic and static graph mode should be done before
instantiating a Model. The input description, i.e., paddle.static.InputSpec,
is required for static graph.
is required.
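A minimal sketch of the intended construction, assuming a toy Sequential network
(the layer sizes and spec names below are illustrative only):

    import paddle
    from paddle.static import InputSpec

    # The input description must always be given when building a Model.
    input = InputSpec([None, 784], 'float32', 'x')
    label = InputSpec([None, 1], 'int64', 'label')
    net = paddle.nn.Sequential(
        paddle.nn.Linear(784, 200),
        paddle.nn.Tanh(),
        paddle.nn.Linear(200, 10),
        paddle.nn.Softmax())
    model = paddle.Model(net, input, label)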
Args:
    network (paddle.nn.Layer): The network is an instance of
        paddle.nn.Layer.
    inputs (InputSpec|list|dict|None): `inputs`, entry points of network,
        could be an InputSpec instance, or a list of InputSpec instances,
        or a dict ({name: InputSpec}), or None. For static graph,
        inputs must be set. For dynamic graph, it could be None.
        or a dict ({name: InputSpec}), and it couldn't be None.
    labels (InputSpec|list|None): `labels`, entry points of network,
        could be an InputSpec instance or a list of InputSpec instances,
        or None. For static graph, if labels is required in loss,
@@ -849,10 +848,9 @@ class Model(object):
self._optimizer = None
self._test_dataloader = None
if not in_dygraph_mode():
    if not isinstance(inputs, (list, dict, Input)):
        raise TypeError(
            "'inputs' must be list or dict in static graph mode")
if not isinstance(inputs, (list, dict, Input)):
    raise TypeError(
        "'inputs' must be list or dict, and couldn't be None.")
self._inputs = self._verify_spec(inputs, True)
self._labels = self._verify_spec(labels)
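# A quick sketch of what this check means for callers (the Linear layer and the
# InputSpec below are illustrative only; any paddle.nn.Layer works):
import paddle
from paddle.static import InputSpec

net = paddle.nn.Linear(784, 10)
try:
    model = paddle.Model(net)  # no input description -> rejected in both modes
except TypeError as e:
    print(e)  # 'inputs' must be list or dict, and couldn't be None.
# Supplying an InputSpec (or a list/dict of them) is accepted.
model = paddle.Model(net, InputSpec([None, 784], 'float32', 'x'))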
@@ -1004,11 +1002,7 @@ class Model(object):
have no variables that need to be saved (like SGD), the file will not be generated).
This function will silently overwrite an existing file at the target location.
If `training` is set to False, only the inference model will be saved. It
should be noted that before using `save`, you should run the model, and
the shape of the saved input is the same as the input used when running it.
`@paddle.jit.to_static` must be added on the `forward` function of your layer
in dynamic mode for now, and this will be optimized later.
If `training` is set to False, only the inference model will be saved.
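A small sketch of both modes of `save` (the path prefixes are illustrative, and
`model` is assumed to be a prepared paddle.Model like the one in the example below):

    # Save parameters plus optimizer state, for resuming training later.
    model.save('checkpoint/test')
    # Save only the inference model.
    model.save('inference_model', training=False)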
Args:
    path (str): The file prefix to save model. The format is
@@ -1037,8 +1031,6 @@ class Model(object):
nn.Linear(200, 10),
nn.Softmax())
# If save for inference in dygraph, need this
@paddle.jit.to_static
def forward(self, x):
    return self.net(x)
@@ -1046,7 +1038,7 @@ class Model(object):
device = paddle.set_device('cpu')
# if using static graph, do not set
paddle.disable_static(device) if dynamic else None
# inputs and labels are not required for dynamic graph.
input = InputSpec([None, 784], 'float32', 'x')
label = InputSpec([None, 1], 'int64', 'label')
model = paddle.Model(Mnist(), input, label)
@@ -1649,10 +1641,6 @@ class Model(object):
model_only=False):
"""
Saving an inference model can be done in static or dynamic mode.
It should be noted that before using `save_inference_model`, you should
run the model, and the saved shape is the same as the input used when
running it. `@paddle.jit.to_static` must be added on the `forward` function of
your layer in dynamic mode for now, and this will be optimized later.
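A hedged usage sketch (the directory name is illustrative, and `model` is assumed
to have been run once in dynamic mode, per the caveat above):

    model.save_inference_model('inference_dir')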
Args:
    save_dir (str): The directory path to save the inference model.
@@ -1678,14 +1666,11 @@ class Model(object):
return result_list
# TODO:
# 1. Make it unnecessary to run the model before calling `save_inference_model` for users in dygraph.
# 2. Save the correct shape of the input; for now the interface stores the shape that the user fed to
#    the inputs of the model when running it.
# 3. Make it unnecessary to add `@paddle.jit.to_static` for users in dynamic mode.
if fluid.in_dygraph_mode():
    with fluid.framework._dygraph_guard(None):
        layer = self.network
        layer.forward = paddle.jit.to_static(
            layer.forward, input_spec=self._inputs)
# 1. input check
prog_translator = ProgramTranslator()
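# For context, a standalone sketch of the same dygraph-to-static conversion used
# above; the Linear layer and the InputSpec are illustrative stand-ins for
# self.network and self._inputs:
import paddle
from paddle.static import InputSpec

layer = paddle.nn.Linear(784, 10)
# Wrap forward so a static program can be traced with a fixed input description.
layer.forward = paddle.jit.to_static(
    layer.forward, input_spec=[InputSpec([None, 784], 'float32', 'x')])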
@@ -1879,18 +1864,7 @@ class Model(object):
def _verify_spec(self, specs, is_input=False):
    out_specs = []
    if specs is None:
        # Note(Aurelius84): If no specific specs of `Input` are given, use the argument names of the
        # `forward` function to generate `Input`. But how can we know the actual shape of each input tensor?
        if is_input:
            out_specs = [
                Input(
                    name=n, shape=[None])
                for n in extract_args(self.network.forward) if n != 'self'
            ]
        else:
            out_specs = to_list(specs)
    elif isinstance(specs, dict):
    if isinstance(specs, dict):
        assert is_input == False
        out_specs = [specs[n] \
            for n in extract_args(self.network.forward) if n != 'self']
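# A hedged sketch of what the dict branch does: reorder {name: InputSpec} entries
# to match the argument order of the network's `forward` signature. `ToyNet` and
# its argument names are illustrative; `inspect.getfullargspec` stands in for extract_args.
import inspect
import paddle
from paddle.static import InputSpec

class ToyNet(paddle.nn.Layer):
    def forward(self, image, token):
        return image

specs = {
    'token': InputSpec([None, 16], 'int64', 'token'),
    'image': InputSpec([None, 784], 'float32', 'image'),
}
arg_names = [n for n in inspect.getfullargspec(ToyNet.forward).args if n != 'self']
ordered = [specs[n] for n in arg_names]  # [image spec, token spec]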