Polish jit.save/load design & remove paddle.SaveLoadConfig (#27623)

* replace config by kwargs

* change save path from dir to prefix

* fix failed unittests

* revert unittest name change

* polish en docs

* add more tests for coverage
Chen Weihang authored 4 years ago, committed by GitHub
parent 74d3a55072
commit 9b49f02441
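
For reference, a minimal sketch of the polished API this change establishes (the toy layer and paths are illustrative; the path-as-prefix behavior and keyword-style options are the ones exercised in the diffs below):

import paddle

class ToyNet(paddle.nn.Layer):        # stand-in layer for illustration
    def __init__(self):
        super(ToyNet, self).__init__()
        self.fc = paddle.nn.Linear(784, 10)

    @paddle.jit.to_static
    def forward(self, x):
        return self.fc(paddle.flatten(x, start_axis=1))

net = ToyNet()
# `path` is now a file prefix rather than a directory: this writes
# ./inference/toy.pdmodel, ./inference/toy.pdiparams and ./inference/toy.pdiparams.info.
paddle.jit.save(
    layer=net,
    path="./inference/toy",
    input_spec=[paddle.static.InputSpec(shape=[None, 1, 28, 28], dtype='float32')])

loaded = paddle.jit.load("./inference/toy")   # the load side takes the same prefix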

@ -235,7 +235,6 @@ from .framework import grad #DEFINE_ALIAS
from .framework import no_grad #DEFINE_ALIAS
from .framework import save #DEFINE_ALIAS
from .framework import load #DEFINE_ALIAS
from .framework import SaveLoadConfig #DEFINE_ALIAS
from .framework import DataParallel #DEFINE_ALIAS
from .framework import NoamDecay #DEFINE_ALIAS

@ -31,6 +31,7 @@ from paddle.fluid.dygraph.nn import Conv2D
from paddle.fluid.dygraph.nn import Pool2D
from paddle.fluid.dygraph.nn import Linear
from paddle.fluid.log_helper import get_logger
from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
paddle.enable_static()
@ -231,10 +232,11 @@ class TestImperativeQat(unittest.TestCase):
before_save = lenet(test_img)
# save inference quantized model
path = "./mnist_infer_model"
path = "./qat_infer_model/lenet"
save_dir = "./qat_infer_model"
paddle.jit.save(
layer=lenet,
model_path=path,
path=path,
input_spec=[
paddle.static.InputSpec(
shape=[None, 1, 28, 28], dtype='float32')
@ -245,12 +247,12 @@ class TestImperativeQat(unittest.TestCase):
else:
place = core.CPUPlace()
exe = fluid.Executor(place)
[inference_program, feed_target_names, fetch_targets] = (
fluid.io.load_inference_model(
dirname=path,
executor=exe,
model_filename="__model__",
params_filename="__variables__"))
[inference_program, feed_target_names,
fetch_targets] = fluid.io.load_inference_model(
dirname=save_dir,
executor=exe,
model_filename="lenet" + INFER_MODEL_SUFFIX,
params_filename="lenet" + INFER_PARAMS_SUFFIX)
after_save, = exe.run(inference_program,
feed={feed_target_names[0]: test_data},
fetch_list=fetch_targets)
@ -339,7 +341,7 @@ class TestImperativeQat(unittest.TestCase):
paddle.jit.save(
layer=lenet,
model_path="./dynamic_mnist",
path="./dynamic_mnist/model",
input_spec=[
paddle.static.InputSpec(
shape=[None, 1, 28, 28], dtype='float32')

@ -31,6 +31,7 @@ from paddle.fluid.dygraph.nn import Conv2D
from paddle.fluid.dygraph.nn import Pool2D
from paddle.fluid.dygraph.nn import Linear
from paddle.fluid.log_helper import get_logger
from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
paddle.enable_static()
@ -231,10 +232,11 @@ class TestImperativeQat(unittest.TestCase):
before_save = lenet(test_img)
# save inference quantized model
path = "./mnist_infer_model"
path = "./qat_infer_model/mnist"
save_dir = "./qat_infer_model"
paddle.jit.save(
layer=lenet,
model_path=path,
path=path,
input_spec=[
paddle.static.InputSpec(
shape=[None, 1, 28, 28], dtype='float32')
@ -245,12 +247,12 @@ class TestImperativeQat(unittest.TestCase):
else:
place = core.CPUPlace()
exe = fluid.Executor(place)
[inference_program, feed_target_names, fetch_targets] = (
fluid.io.load_inference_model(
dirname=path,
executor=exe,
model_filename="__model__",
params_filename="__variables__"))
[inference_program, feed_target_names,
fetch_targets] = fluid.io.load_inference_model(
dirname=save_dir,
executor=exe,
model_filename="mnist" + INFER_MODEL_SUFFIX,
params_filename="mnist" + INFER_PARAMS_SUFFIX)
after_save, = exe.run(inference_program,
feed={feed_target_names[0]: test_data},
fetch_list=fetch_targets)
@ -339,7 +341,7 @@ class TestImperativeQat(unittest.TestCase):
paddle.jit.save(
layer=lenet,
model_path="./dynamic_mnist",
path="./dynamic_mnist/model",
input_spec=[
paddle.static.InputSpec(
shape=[None, 1, 28, 28], dtype='float32')

@ -24,8 +24,8 @@ from . import learning_rate_scheduler
import warnings
from .. import core
from .base import guard
from paddle.fluid.dygraph.jit import SaveLoadConfig, deprecate_save_load_configs
from paddle.fluid.dygraph.io import _construct_program_holders, _construct_params_and_buffers, EXTRA_VAR_INFO_FILENAME
from paddle.fluid.dygraph.jit import _SaveLoadConfig
from paddle.fluid.dygraph.io import _construct_program_holders, _construct_params_and_buffers
__all__ = [
'save_dygraph',
@ -33,35 +33,23 @@ __all__ = [
]
# NOTE(chenweihang): deprecate load_dygraph's argument keep_name_table,
# ensure compatibility when user still use keep_name_table argument
def deprecate_keep_name_table(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
def __warn_and_build_configs__(keep_name_table):
warnings.warn(
"The argument `keep_name_table` has deprecated, please use `SaveLoadConfig.keep_name_table`.",
DeprecationWarning)
config = SaveLoadConfig()
config.keep_name_table = keep_name_table
return config
# deal with arg `keep_name_table`
if len(args) > 1 and isinstance(args[1], bool):
args = list(args)
args[1] = __warn_and_build_configs__(args[1])
# deal with kwargs
elif 'keep_name_table' in kwargs:
kwargs['config'] = __warn_and_build_configs__(kwargs[
'keep_name_table'])
kwargs.pop('keep_name_table')
else:
# do nothing
pass
def _parse_load_config(configs):
supported_configs = ['model_filename', 'params_filename', 'keep_name_table']
# input check
for key in configs:
if key not in supported_configs:
raise ValueError(
"The additional config (%s) of `paddle.fluid.load_dygraph` is not supported."
% (key))
return func(*args, **kwargs)
# construct inner config
inner_config = _SaveLoadConfig()
inner_config.model_filename = configs.get('model_filename', None)
inner_config.params_filename = configs.get('params_filename', None)
inner_config.keep_name_table = configs.get('keep_name_table', None)
return wrapper
return inner_config
@dygraph_only
@ -132,12 +120,12 @@ def save_dygraph(state_dict, model_path):
pickle.dump(model_dict, f, protocol=2)
# NOTE(chenweihang): load_dygraph will be deprecated in the future; we don't
# support new loading features for it
# TODO(qingqing01): remove dygraph_only to support loading static model.
# maybe need to unify the loading interface after 2.0 API is ready.
# @dygraph_only
@deprecate_save_load_configs
@deprecate_keep_name_table
def load_dygraph(model_path, config=None):
def load_dygraph(model_path, **configs):
'''
:api_attr: imperative
@ -152,10 +140,13 @@ def load_dygraph(model_path, config=None):
Args:
model_path(str) : The file prefix where the state_dict is stored.
(The path should not contain the suffix '.pdparams')
config (SaveLoadConfig, optional): :ref:`api_imperative_jit_saveLoadConfig`
object that specifies additional configuration options, these options
are for compatibility with ``jit.save/io.save_inference_model`` formats.
Default None.
**configs (dict, optional): other load configuration options for compatibility. We do not
recommend using these configurations; if not necessary, DO NOT use them. Default None.
The following options are currently supported:
(1) model_filename (string): The inference model file name of the paddle 1.x ``save_inference_model``
save format. Default file name is :code:`__model__` .
(2) params_filename (string): The persistable variables file name of the paddle 1.x ``save_inference_model``
save format. No default file name, save variables separately by default.
Returns:
state_dict(dict) : the dict store the state_dict
@ -196,8 +187,7 @@ def load_dygraph(model_path, config=None):
opti_file_path = model_prefix + ".pdopt"
# deal with argument `config`
if config is None:
config = SaveLoadConfig()
config = _parse_load_config(configs)
if os.path.exists(params_file_path) or os.path.exists(opti_file_path):
# Load state dict by `save_dygraph` save format
@ -246,7 +236,6 @@ def load_dygraph(model_path, config=None):
persistable_var_dict = _construct_params_and_buffers(
model_prefix,
programs,
config.separate_params,
config.params_filename,
append_suffix=False)
@ -255,9 +244,9 @@ def load_dygraph(model_path, config=None):
for var_name in persistable_var_dict:
para_dict[var_name] = persistable_var_dict[var_name].numpy()
# if __variables.info__ exists, we can recover structured_name
var_info_path = os.path.join(model_prefix,
EXTRA_VAR_INFO_FILENAME)
# if *.info exists, we can recover structured_name
var_info_filename = str(config.params_filename) + ".info"
var_info_path = os.path.join(model_prefix, var_info_filename)
if os.path.exists(var_info_path):
with open(var_info_path, 'rb') as f:
extra_var_info = pickle.load(f)
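
Taken together, the reworked loader is used like this (a sketch; the directory and file names are hypothetical, and since `separate_params` is gone, unsupported keys hit the ValueError in `_parse_load_config`):

import paddle.fluid as fluid

# The supported compatibility keywords mirror _parse_load_config above.
para_dict, opti_dict = fluid.load_dygraph(
    "./static_model",              # prefix, or a 1.x save_inference_model directory
    model_filename="__model__",    # combined model file name of the 1.x format
    params_filename="__params__")  # combined params file name, if one was used

# Anything outside the supported set fails fast, e.g.:
#   fluid.load_dygraph("./static_model", separate_params=True)  -> ValueError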

@ -31,8 +31,10 @@ from paddle.fluid.dygraph.base import switch_to_static_graph
__all__ = ['TranslatedLayer']
VARIABLE_FILENAME = "__variables__"
EXTRA_VAR_INFO_FILENAME = "__variables.info__"
INFER_MODEL_SUFFIX = ".pdmodel"
INFER_PARAMS_SUFFIX = ".pdiparams"
INFER_PARAMS_INFO_SUFFIX = ".pdiparams.info"
LOADED_VAR_SUFFIX = "load"
PARAMETER_NAME_PREFIX = "param"
BUFFER_NAME_PREFIX = "buffer"
@ -424,11 +426,8 @@ def _load_persistable_vars_by_program(model_path,
return load_var_dict
def _load_persistable_vars(model_path,
var_info_path,
program_holder,
separate_params=False,
params_filename=None):
def _load_persistable_vars(model_path, var_info_path, program_holder,
params_filename):
# 1. load extra var info
with open(var_info_path, 'rb') as f:
extra_var_info = pickle.load(f)
@ -464,33 +463,22 @@ def _load_persistable_vars(model_path,
new_var = framework._varbase_creator(
name=new_name, persistable=True)
# load separate vars
if separate_params is True:
framework._dygraph_tracer().trace_op(
type='load',
inputs={},
outputs={'Out': new_var},
attrs={'file_path': os.path.join(model_path, name)})
new_var.stop_gradient = extra_var_info[name]['stop_gradient']
load_var_dict[new_name] = new_var
load_var_list.append(new_var)
# 3. load all vars
if separate_params is False:
if params_filename is not None:
var_file_path = os.path.join(model_path, params_filename)
else:
var_file_path = os.path.join(model_path, VARIABLE_FILENAME)
if not os.path.exists(var_file_path):
if len(extra_var_info) != 0:
raise ValueError("The model to be loaded is incomplete.")
else:
framework._dygraph_tracer().trace_op(
type='load_combine',
inputs={},
outputs={'Out': load_var_list},
attrs={'file_path': var_file_path})
assert params_filename is not None, "params_filename should not be None."
var_file_path = os.path.join(model_path, params_filename)
if not os.path.exists(var_file_path):
if len(extra_var_info) != 0:
raise ValueError("The model to be loaded is incomplete.")
else:
framework._dygraph_tracer().trace_op(
type='load_combine',
inputs={},
outputs={'Out': load_var_list},
attrs={'file_path': var_file_path})
return load_var_dict
@ -532,14 +520,13 @@ def _construct_program_holders(model_path, model_filename=None):
def _construct_params_and_buffers(model_path,
programs,
separate_params=False,
params_filename=None,
append_suffix=True):
var_info_path = os.path.join(model_path, EXTRA_VAR_INFO_FILENAME)
var_info_filename = str(params_filename) + ".info"
var_info_path = os.path.join(model_path, var_info_filename)
if os.path.exists(var_info_path):
var_dict = _load_persistable_vars(model_path, var_info_path,
programs['forward'], separate_params,
params_filename)
programs['forward'], params_filename)
else:
var_dict = _load_persistable_vars_by_program(
model_path, programs['forward'], params_filename)
@ -700,18 +687,16 @@ class TranslatedLayer(layers.Layer):
raise ValueError("There is no directory named '%s'" % model_path)
model_filename = None
params_filename = None
separate_params = False
if configs is not None:
model_filename = configs.model_filename
params_filename = configs.params_filename
separate_params = configs.separate_params
# 1. load program desc & construct _ProgramHolder
programs = _construct_program_holders(model_path, model_filename)
# 2. load layer parameters & buffers
persistable_vars = _construct_params_and_buffers(
model_path, programs, separate_params, params_filename)
persistable_vars = _construct_params_and_buffers(model_path, programs,
params_filename)
# 3. construct TranslatedLayer object
translated_layer = TranslatedLayer(programs, persistable_vars)
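
The three suffix constants added above pin down the on-disk layout; a quick sketch of what a save prefix expands to:

from paddle.fluid.dygraph.io import (INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX,
                                     INFER_PARAMS_INFO_SUFFIX)

prefix = "./inference/bert"                  # any jit.save path prefix
print(prefix + INFER_MODEL_SUFFIX)           # ./inference/bert.pdmodel (program desc)
print(prefix + INFER_PARAMS_SUFFIX)          # ./inference/bert.pdiparams (combined params)
print(prefix + INFER_PARAMS_INFO_SUFFIX)     # ./inference/bert.pdiparams.info (extra var info)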

File diff suppressed because it is too large.

@ -14,7 +14,7 @@
from __future__ import print_function
from paddle.fluid.dygraph.jit import SaveLoadConfig
from paddle.fluid.dygraph.jit import _SaveLoadConfig
from paddle.fluid.dygraph.io import TranslatedLayer
@ -31,7 +31,7 @@ class StaticModelRunner(object):
"""
def __new__(cls, model_dir, model_filename=None, params_filename=None):
configs = SaveLoadConfig()
configs = _SaveLoadConfig()
if model_filename is not None:
configs.model_filename = model_filename
if params_filename is not None:

@ -28,11 +28,12 @@ class PredictorTools(object):
Paddle-Inference predictor
'''
def __init__(self, model_path, params_file, feeds_var):
def __init__(self, model_path, model_file, params_file, feeds_var):
'''
__init__
'''
self.model_path = model_path
self.model_file = model_file
self.params_file = params_file
self.feeds_var = feeds_var
@ -43,7 +44,7 @@ class PredictorTools(object):
'''
if os.path.exists(os.path.join(self.model_path, self.params_file)):
config = AnalysisConfig(
os.path.join(self.model_path, "__model__"),
os.path.join(self.model_path, self.model_file),
os.path.join(self.model_path, self.params_file))
else:
config = AnalysisConfig(os.path.join(self.model_path))
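
With the new `model_file` argument, callers build the helper like this (a sketch; the directory, file names, and input shape are illustrative):

import numpy as np
from predictor_utils import PredictorTools

data = np.random.random([1, 1, 28, 28]).astype('float32')
tool = PredictorTools("./inference",      # model_path: directory holding the files
                      "mnist.pdmodel",    # model_file: saved program
                      "mnist.pdiparams",  # params_file: combined parameters
                      [data])             # feeds_var
out = tool()                              # runs the Paddle-Inference predictor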

@ -12,13 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
from paddle.fluid.dygraph.io import VARIABLE_FILENAME
from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
from bert_dygraph_model import PretrainModelLayer
from bert_utils import get_bert_config, get_feed_data_reader
@ -31,7 +33,10 @@ place = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace(
SEED = 2020
STEP_NUM = 10
PRINT_STEP = 2
MODEL_SAVE_PATH = "./bert.inference.model"
MODEL_SAVE_DIR = "./inference"
MODEL_SAVE_PREFIX = "./inference/bert"
MODEL_FILENAME = "bert" + INFER_MODEL_SUFFIX
PARAMS_FILENAME = "bert" + INFER_PARAMS_SUFFIX
DY_STATE_DICT_SAVE_PATH = "./bert.dygraph"
@ -85,7 +90,7 @@ def train(bert_config, data_reader, to_static):
step_idx += 1
if step_idx == STEP_NUM:
if to_static:
fluid.dygraph.jit.save(bert, MODEL_SAVE_PATH)
fluid.dygraph.jit.save(bert, MODEL_SAVE_PREFIX)
else:
fluid.dygraph.save_dygraph(bert.state_dict(),
DY_STATE_DICT_SAVE_PATH)
@ -104,11 +109,15 @@ def train_static(bert_config, data_reader):
def predict_static(data):
paddle.enable_static()
exe = fluid.Executor(place)
# load inference model
[inference_program, feed_target_names,
fetch_targets] = fluid.io.load_inference_model(
MODEL_SAVE_PATH, executor=exe, params_filename=VARIABLE_FILENAME)
MODEL_SAVE_DIR,
executor=exe,
model_filename=MODEL_FILENAME,
params_filename=PARAMS_FILENAME)
pred_res = exe.run(inference_program,
feed=dict(zip(feed_target_names, data)),
fetch_list=fetch_targets)
@ -143,7 +152,7 @@ def predict_dygraph(bert_config, data):
def predict_dygraph_jit(data):
with fluid.dygraph.guard(place):
bert = fluid.dygraph.jit.load(MODEL_SAVE_PATH)
bert = fluid.dygraph.jit.load(MODEL_SAVE_PREFIX)
bert.eval()
src_ids, pos_ids, sent_ids, input_mask, mask_label, mask_pos, labels = data
@ -155,7 +164,8 @@ def predict_dygraph_jit(data):
def predict_analysis_inference(data):
output = PredictorTools(MODEL_SAVE_PATH, VARIABLE_FILENAME, data)
output = PredictorTools(MODEL_SAVE_DIR, MODEL_FILENAME, PARAMS_FILENAME,
data)
out = output()
return out

@ -21,7 +21,7 @@ import paddle.fluid as fluid
from paddle.fluid import ParamAttr
from paddle.fluid.dygraph import to_variable
from paddle.fluid.dygraph import ProgramTranslator
from paddle.fluid.dygraph.io import VARIABLE_FILENAME
from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
from predictor_utils import PredictorTools
@ -422,7 +422,10 @@ class Args(object):
prop_boundary_ratio = 0.5
num_sample = 2
num_sample_perbin = 2
infer_dir = './bmn_infer_model'
model_save_dir = "./inference"
model_save_prefix = "./inference/bmn"
model_filename = "bmn" + INFER_MODEL_SUFFIX
params_filename = "bmn" + INFER_PARAMS_SUFFIX
dy_param_path = './bmn_dy_param'
@ -620,7 +623,7 @@ def train_bmn(args, place, to_static):
if batch_id == args.train_batch_num:
if to_static:
fluid.dygraph.jit.save(bmn, args.infer_dir)
fluid.dygraph.jit.save(bmn, args.model_save_prefix)
else:
fluid.dygraph.save_dygraph(bmn.state_dict(),
args.dy_param_path)
@ -735,13 +738,15 @@ class TestTrain(unittest.TestCase):
return pred_res
def predict_static(self, data):
paddle.enable_static()
exe = fluid.Executor(self.place)
# load inference model
[inference_program, feed_target_names,
fetch_targets] = fluid.io.load_inference_model(
self.args.infer_dir,
self.args.model_save_dir,
executor=exe,
params_filename=VARIABLE_FILENAME)
model_filename=self.args.model_filename,
params_filename=self.args.params_filename)
pred_res = exe.run(inference_program,
feed={feed_target_names[0]: data},
fetch_list=fetch_targets)
@ -750,7 +755,7 @@ class TestTrain(unittest.TestCase):
def predict_dygraph_jit(self, data):
with fluid.dygraph.guard(self.place):
bmn = fluid.dygraph.jit.load(self.args.infer_dir)
bmn = fluid.dygraph.jit.load(self.args.model_save_prefix)
bmn.eval()
x = to_variable(data)
@ -760,7 +765,9 @@ class TestTrain(unittest.TestCase):
return pred_res
def predict_analysis_inference(self, data):
output = PredictorTools(self.args.infer_dir, VARIABLE_FILENAME, [data])
output = PredictorTools(self.args.model_save_dir,
self.args.model_filename,
self.args.params_filename, [data])
out = output()
return out

@ -26,7 +26,7 @@ import paddle.fluid as fluid
from paddle.fluid.dygraph import to_variable
from paddle.fluid.dygraph import Embedding, Linear, GRUUnit
from paddle.fluid.dygraph import declarative, ProgramTranslator
from paddle.fluid.dygraph.io import VARIABLE_FILENAME
from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
from predictor_utils import PredictorTools
@ -395,7 +395,10 @@ class Args(object):
base_learning_rate = 0.01
bigru_num = 2
print_steps = 1
model_save_dir = "./lac_model"
model_save_dir = "./inference"
model_save_prefix = "./inference/lac"
model_filename = "lac" + INFER_MODEL_SUFFIX
params_filename = "lac" + INFER_PARAMS_SUFFIX
dy_param_path = "./lac_dy_param"
@ -498,13 +501,11 @@ def do_train(args, to_static):
step += 1
# save inference model
if to_static:
configs = fluid.dygraph.jit.SaveLoadConfig()
configs.output_spec = [crf_decode]
fluid.dygraph.jit.save(
layer=model,
model_path=args.model_save_dir,
path=args.model_save_prefix,
input_spec=[words, length],
configs=configs)
output_spec=[crf_decode])
else:
fluid.dygraph.save_dygraph(model.state_dict(), args.dy_param_path)
@ -573,13 +574,15 @@ class TestLACModel(unittest.TestCase):
LAC model contains h_0 created in `__init__` that is necessary for inferring.
Load inference model to test it's ok for prediction.
"""
paddle.enable_static()
exe = fluid.Executor(self.place)
# load inference model
[inference_program, feed_target_names,
fetch_targets] = fluid.io.load_inference_model(
self.args.model_save_dir,
executor=exe,
params_filename=VARIABLE_FILENAME)
model_filename=self.args.model_filename,
params_filename=self.args.params_filename)
words, targets, length = batch
pred_res = exe.run(
@ -592,7 +595,7 @@ class TestLACModel(unittest.TestCase):
def predict_dygraph_jit(self, batch):
words, targets, length = batch
with fluid.dygraph.guard(self.place):
model = fluid.dygraph.jit.load(self.args.model_save_dir)
model = fluid.dygraph.jit.load(self.args.model_save_prefix)
model.eval()
pred_res = model(to_variable(words), to_variable(length))
@ -602,8 +605,9 @@ class TestLACModel(unittest.TestCase):
def predict_analysis_inference(self, batch):
words, targets, length = batch
output = PredictorTools(self.args.model_save_dir, VARIABLE_FILENAME,
[words, length])
output = PredictorTools(self.args.model_save_dir,
self.args.model_filename,
self.args.params_filename, [words, length])
out = output()
return out
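
The LAC change above shows the new pattern for output pruning: `output_spec` is passed straight to `paddle.jit.save` instead of through a `SaveLoadConfig`. A self-contained sketch (the two-output toy layer is illustrative):

import paddle

class TwoHead(paddle.nn.Layer):           # toy layer with two outputs
    def __init__(self):
        super(TwoHead, self).__init__()
        self.fc = paddle.nn.Linear(4, 4)

    @paddle.jit.to_static
    def forward(self, x):
        h = self.fc(x)
        return h, paddle.mean(h)

net = TwoHead()
pred, _ = net(paddle.randn([2, 4]))       # run once so `pred` exists
# Keep only `pred` in the exported program, as the LAC test keeps crf_decode.
paddle.jit.save(net, path="./two_head/model", output_spec=[pred])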

@ -25,7 +25,7 @@ from paddle.fluid.dygraph.base import switch_to_static_graph
from paddle.fluid.dygraph import to_variable
from paddle.fluid.dygraph.nn import Conv2D, Linear, Pool2D
from paddle.fluid.optimizer import AdamOptimizer
from paddle.fluid.dygraph.io import VARIABLE_FILENAME
from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
from predictor_utils import PredictorTools
@ -218,34 +218,39 @@ class TestMNISTWithToStatic(TestMNIST):
def check_jit_save_load(self, model, inputs, input_spec, to_static, gt_out):
if to_static:
infer_model_path = "./test_mnist_inference_model_by_jit_save"
configs = fluid.dygraph.jit.SaveLoadConfig()
configs.output_spec = [gt_out]
model_save_dir = "./inference"
model_save_prefix = "./inference/mnist"
model_filename = "mnist" + INFER_MODEL_SUFFIX
params_filename = "mnist" + INFER_PARAMS_SUFFIX
fluid.dygraph.jit.save(
layer=model,
model_path=infer_model_path,
path=model_save_prefix,
input_spec=input_spec,
configs=configs)
output_spec=[gt_out])
# load in static mode
static_infer_out = self.jit_load_and_run_inference_static(
infer_model_path, inputs)
model_save_dir, model_filename, params_filename, inputs)
self.assertTrue(np.allclose(gt_out.numpy(), static_infer_out))
# load in dygraph mode
dygraph_infer_out = self.jit_load_and_run_inference_dygraph(
infer_model_path, inputs)
model_save_prefix, inputs)
self.assertTrue(np.allclose(gt_out.numpy(), dygraph_infer_out))
# load in Paddle-Inference
predictor_infer_out = self.predictor_load_and_run_inference_analysis(
infer_model_path, inputs)
model_save_dir, model_filename, params_filename, inputs)
self.assertTrue(np.allclose(gt_out.numpy(), predictor_infer_out))
@switch_to_static_graph
def jit_load_and_run_inference_static(self, model_path, inputs):
def jit_load_and_run_inference_static(self, model_path, model_filename,
params_filename, inputs):
paddle.enable_static()
exe = fluid.Executor(self.place)
[inference_program, feed_target_names,
fetch_targets] = fluid.io.load_inference_model(
dirname=model_path,
executor=exe,
params_filename=VARIABLE_FILENAME)
model_filename=model_filename,
params_filename=params_filename)
assert len(inputs) == len(feed_target_names)
results = exe.run(inference_program,
feed=dict(zip(feed_target_names, inputs)),
@ -258,8 +263,10 @@ class TestMNISTWithToStatic(TestMNIST):
pred = infer_net(inputs[0])
return pred.numpy()
def predictor_load_and_run_inference_analysis(self, model_path, inputs):
output = PredictorTools(model_path, VARIABLE_FILENAME, inputs)
def predictor_load_and_run_inference_analysis(
self, model_path, model_filename, params_filename, inputs):
output = PredictorTools(model_path, model_filename, params_filename,
inputs)
out = output()
return out

@ -20,7 +20,7 @@ from paddle.fluid.initializer import MSRA
from paddle.fluid.param_attr import ParamAttr
from paddle.fluid.dygraph.nn import Conv2D, Pool2D, BatchNorm, Linear
from paddle.fluid.dygraph import declarative, ProgramTranslator
from paddle.fluid.dygraph.io import VARIABLE_FILENAME
from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
import unittest
@ -439,7 +439,10 @@ class Args(object):
train_step = 10
place = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda(
) else fluid.CPUPlace()
model_save_path = model + ".inference.model"
model_save_dir = "./inference"
model_save_prefix = "./inference/" + model
model_filename = model + INFER_MODEL_SUFFIX
params_filename = model + INFER_PARAMS_SUFFIX
dy_state_dict_save_path = model + ".dygraph"
@ -504,7 +507,7 @@ def train_mobilenet(args, to_static):
t_last = time.time()
if batch_id > args.train_step:
if to_static:
fluid.dygraph.jit.save(net, args.model_save_path)
fluid.dygraph.jit.save(net, args.model_save_prefix)
else:
fluid.dygraph.save_dygraph(net.state_dict(),
args.dy_state_dict_save_path)
@ -514,11 +517,15 @@ def train_mobilenet(args, to_static):
def predict_static(args, data):
paddle.enable_static()
exe = fluid.Executor(args.place)
# load inference model
[inference_program, feed_target_names,
fetch_targets] = fluid.io.load_inference_model(
args.model_save_path, executor=exe, params_filename=VARIABLE_FILENAME)
args.model_save_dir,
executor=exe,
model_filename=args.model_filename,
params_filename=args.params_filename)
pred_res = exe.run(inference_program,
feed={feed_target_names[0]: data},
@ -545,7 +552,7 @@ def predict_dygraph(args, data):
def predict_dygraph_jit(args, data):
with fluid.dygraph.guard(args.place):
model = fluid.dygraph.jit.load(args.model_save_path)
model = fluid.dygraph.jit.load(args.model_save_prefix)
model.eval()
pred_res = model(data)
@ -554,7 +561,8 @@ def predict_dygraph_jit(args, data):
def predict_analysis_inference(args, data):
output = PredictorTools(args.model_save_path, VARIABLE_FILENAME, [data])
output = PredictorTools(args.model_save_dir, args.model_filename,
args.params_filename, [data])
out = output()
return out
@ -565,7 +573,9 @@ class TestMobileNet(unittest.TestCase):
def train(self, model_name, to_static):
self.args.model = model_name
self.args.model_save_path = model_name + ".inference.model"
self.args.model_save_prefix = "./inference/" + model_name
self.args.model_filename = model_name + INFER_MODEL_SUFFIX
self.args.params_filename = model_name + INFER_PARAMS_SUFFIX
self.args.dy_state_dict_save_path = model_name + ".dygraph"
out = train_mobilenet(self.args, to_static)
return out
@ -579,7 +589,9 @@ class TestMobileNet(unittest.TestCase):
def assert_same_predict(self, model_name):
self.args.model = model_name
self.args.model_save_path = model_name + ".inference.model"
self.args.model_save_prefix = "./inference/" + model_name
self.args.model_filename = model_name + INFER_MODEL_SUFFIX
self.args.params_filename = model_name + INFER_PARAMS_SUFFIX
self.args.dy_state_dict_save_path = model_name + ".dygraph"
local_random = np.random.RandomState(SEED)
image = local_random.random_sample([1, 3, 224, 224]).astype('float32')

@ -24,7 +24,7 @@ import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import declarative, ProgramTranslator
from paddle.fluid.dygraph.nn import BatchNorm, Conv2D, Linear, Pool2D
from paddle.fluid.dygraph.io import VARIABLE_FILENAME
from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
from predictor_utils import PredictorTools
@ -38,7 +38,11 @@ batch_size = 2
epoch_num = 1
place = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() \
else fluid.CPUPlace()
MODEL_SAVE_PATH = "./resnet.inference.model"
MODEL_SAVE_DIR = "./inference"
MODEL_SAVE_PREFIX = "./inference/resnet"
MODEL_FILENAME = "resnet" + INFER_MODEL_SUFFIX
PARAMS_FILENAME = "resnet" + INFER_PARAMS_SUFFIX
DY_STATE_DICT_SAVE_PATH = "./resnet.dygraph"
program_translator = ProgramTranslator()
@ -261,7 +265,7 @@ def train(to_static):
total_acc1.numpy() / total_sample, total_acc5.numpy() / total_sample, end_time-start_time))
if batch_id == 10:
if to_static:
fluid.dygraph.jit.save(resnet, MODEL_SAVE_PATH)
fluid.dygraph.jit.save(resnet, MODEL_SAVE_PREFIX)
else:
fluid.dygraph.save_dygraph(resnet.state_dict(),
DY_STATE_DICT_SAVE_PATH)
@ -287,10 +291,14 @@ def predict_dygraph(data):
def predict_static(data):
paddle.enable_static()
exe = fluid.Executor(place)
[inference_program, feed_target_names,
fetch_targets] = fluid.io.load_inference_model(
MODEL_SAVE_PATH, executor=exe, params_filename=VARIABLE_FILENAME)
MODEL_SAVE_DIR,
executor=exe,
model_filename=MODEL_FILENAME,
params_filename=PARAMS_FILENAME)
pred_res = exe.run(inference_program,
feed={feed_target_names[0]: data},
@ -301,7 +309,7 @@ def predict_static(data):
def predict_dygraph_jit(data):
with fluid.dygraph.guard(place):
resnet = fluid.dygraph.jit.load(MODEL_SAVE_PATH)
resnet = fluid.dygraph.jit.load(MODEL_SAVE_PREFIX)
resnet.eval()
pred_res = resnet(data)
@ -310,7 +318,8 @@ def predict_dygraph_jit(data):
def predict_analysis_inference(data):
output = PredictorTools(MODEL_SAVE_PATH, VARIABLE_FILENAME, [data])
output = PredictorTools(MODEL_SAVE_DIR, MODEL_FILENAME, PARAMS_FILENAME,
[data])
out = output()
return out

@ -34,7 +34,11 @@ batch_size = 2
epoch_num = 1
place = paddle.CUDAPlace(0) if paddle.is_compiled_with_cuda() \
else paddle.CPUPlace()
MODEL_SAVE_PATH = "./resnet_v2.inference.model"
MODEL_SAVE_DIR = "./inference"
MODEL_SAVE_PREFIX = "./inference/resnet_v2"
MODEL_FILENAME = "resnet_v2" + paddle.fluid.dygraph.io.INFER_MODEL_SUFFIX
PARAMS_FILENAME = "resnet_v2" + paddle.fluid.dygraph.io.INFER_PARAMS_SUFFIX
DY_STATE_DICT_SAVE_PATH = "./resnet_v2.dygraph"
program_translator = paddle.jit.ProgramTranslator()
@ -255,7 +259,7 @@ def train(to_static):
total_acc1.numpy() / total_sample, total_acc5.numpy() / total_sample, end_time-start_time))
if batch_id == 10:
if to_static:
paddle.jit.save(resnet, MODEL_SAVE_PATH)
paddle.jit.save(resnet, MODEL_SAVE_PREFIX)
else:
paddle.fluid.dygraph.save_dygraph(resnet.state_dict(),
DY_STATE_DICT_SAVE_PATH)
@ -289,9 +293,10 @@ def predict_static(data):
exe = paddle.static.Executor(place)
[inference_program, feed_target_names,
fetch_targets] = paddle.static.load_inference_model(
MODEL_SAVE_PATH,
MODEL_SAVE_DIR,
executor=exe,
params_filename=paddle.fluid.dygraph.io.VARIABLE_FILENAME)
model_filename=MODEL_FILENAME,
params_filename=PARAMS_FILENAME)
pred_res = exe.run(inference_program,
feed={feed_target_names[0]: data},
@ -302,7 +307,7 @@ def predict_static(data):
def predict_dygraph_jit(data):
paddle.disable_static(place)
resnet = paddle.jit.load(MODEL_SAVE_PATH)
resnet = paddle.jit.load(MODEL_SAVE_PREFIX)
resnet.eval()
pred_res = resnet(data)
@ -313,8 +318,8 @@ def predict_dygraph_jit(data):
def predict_analysis_inference(data):
output = PredictorTools(MODEL_SAVE_PATH,
paddle.fluid.dygraph.io.VARIABLE_FILENAME, [data])
output = PredictorTools(MODEL_SAVE_DIR, MODEL_FILENAME, PARAMS_FILENAME,
[data])
out = output()
return out

@ -16,14 +16,14 @@ from __future__ import print_function
import os
import unittest
import numpy as np
import paddle.fluid as fluid
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
from paddle.fluid.dygraph.jit import declarative
from paddle.fluid.dygraph.dygraph_to_static.partial_program import partial_program_from
from paddle.fluid.dygraph.io import EXTRA_VAR_INFO_FILENAME
from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX, INFER_PARAMS_INFO_SUFFIX
SEED = 2020
@ -66,14 +66,13 @@ class TestDyToStaticSaveInferenceModel(unittest.TestCase):
adam.minimize(loss)
layer.clear_gradients()
# test for saving model in dygraph.guard
infer_model_dir = "./test_dy2stat_save_inference_model_in_guard"
configs = fluid.dygraph.jit.SaveLoadConfig()
configs.output_spec = [pred]
infer_model_prefix = "./test_dy2stat_inference_in_guard/model"
infer_model_dir = "./test_dy2stat_inference_in_guard"
fluid.dygraph.jit.save(
layer=layer,
model_path=infer_model_dir,
path=infer_model_prefix,
input_spec=[x],
configs=configs)
output_spec=[pred])
# Check the correctness of the inference
dygraph_out, _ = layer(x)
self.check_save_inference_model(layer, [x_data], dygraph_out.numpy())
@ -91,30 +90,30 @@ class TestDyToStaticSaveInferenceModel(unittest.TestCase):
expected_persistable_vars = set([p.name for p in model.parameters()])
infer_model_dir = "./test_dy2stat_save_inference_model"
configs = fluid.dygraph.jit.SaveLoadConfig()
if fetch is not None:
configs.output_spec = fetch
configs.separate_params = True
infer_model_prefix = "./test_dy2stat_inference/model"
infer_model_dir = "./test_dy2stat_inference"
model_filename = "model" + INFER_MODEL_SUFFIX
params_filename = "model" + INFER_PARAMS_SUFFIX
fluid.dygraph.jit.save(
layer=model,
model_path=infer_model_dir,
path=infer_model_prefix,
input_spec=feed if feed else None,
configs=configs)
saved_var_names = set([
filename for filename in os.listdir(infer_model_dir)
if filename != '__model__' and filename != EXTRA_VAR_INFO_FILENAME
])
self.assertEqual(saved_var_names, expected_persistable_vars)
output_spec=fetch if fetch else None)
# Check the correctness of the inference
infer_out = self.load_and_run_inference(infer_model_dir, inputs)
infer_out = self.load_and_run_inference(infer_model_dir, model_filename,
params_filename, inputs)
self.assertTrue(np.allclose(gt_out, infer_out))
def load_and_run_inference(self, model_path, inputs):
def load_and_run_inference(self, model_path, model_filename,
params_filename, inputs):
paddle.enable_static()
exe = fluid.Executor(place)
[inference_program, feed_target_names,
fetch_targets] = fluid.io.load_inference_model(
dirname=model_path, executor=exe)
dirname=model_path,
executor=exe,
model_filename=model_filename,
params_filename=params_filename)
results = exe.run(inference_program,
feed=dict(zip(feed_target_names, inputs)),
fetch_list=fetch_targets)

@ -24,7 +24,7 @@ from paddle.fluid.dygraph.base import to_variable
from paddle.fluid.dygraph.nn import BatchNorm, Conv2D, Linear, Pool2D
from paddle.fluid.dygraph import declarative
from paddle.fluid.dygraph import ProgramTranslator
from paddle.fluid.dygraph.io import VARIABLE_FILENAME
from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
from predictor_utils import PredictorTools
@ -35,7 +35,10 @@ BATCH_SIZE = 8
EPOCH_NUM = 1
PRINT_STEP = 2
STEP_NUM = 10
MODEL_SAVE_PATH = "./se_resnet.inference.model"
MODEL_SAVE_DIR = "./inference"
MODEL_SAVE_PREFIX = "./inference/se_resnet"
MODEL_FILENAME = "se_resnet" + INFER_MODEL_SUFFIX
PARAMS_FILENAME = "se_resnet" + INFER_PARAMS_SUFFIX
DY_STATE_DICT_SAVE_PATH = "./se_resnet.dygraph"
place = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() \
@ -383,10 +386,10 @@ def train(train_reader, to_static):
step_idx += 1
if step_idx == STEP_NUM:
if to_static:
configs = fluid.dygraph.jit.SaveLoadConfig()
configs.output_spec = [pred]
fluid.dygraph.jit.save(se_resnext, MODEL_SAVE_PATH,
[img], configs)
fluid.dygraph.jit.save(
se_resnext,
MODEL_SAVE_PREFIX, [img],
output_spec=[pred])
else:
fluid.dygraph.save_dygraph(se_resnext.state_dict(),
DY_STATE_DICT_SAVE_PATH)
@ -414,10 +417,14 @@ def predict_dygraph(data):
def predict_static(data):
paddle.enable_static()
exe = fluid.Executor(place)
[inference_program, feed_target_names,
fetch_targets] = fluid.io.load_inference_model(
MODEL_SAVE_PATH, executor=exe, params_filename=VARIABLE_FILENAME)
MODEL_SAVE_DIR,
executor=exe,
model_filename=MODEL_FILENAME,
params_filename=PARAMS_FILENAME)
pred_res = exe.run(inference_program,
feed={feed_target_names[0]: data},
@ -428,7 +435,7 @@ def predict_static(data):
def predict_dygraph_jit(data):
with fluid.dygraph.guard(place):
se_resnext = fluid.dygraph.jit.load(MODEL_SAVE_PATH)
se_resnext = fluid.dygraph.jit.load(MODEL_SAVE_PREFIX)
se_resnext.eval()
pred_res = se_resnext(data)
@ -437,7 +444,8 @@ def predict_dygraph_jit(data):
def predict_analysis_inference(data):
output = PredictorTools(MODEL_SAVE_PATH, VARIABLE_FILENAME, [data])
output = PredictorTools(MODEL_SAVE_DIR, MODEL_FILENAME, PARAMS_FILENAME,
[data])
out = output()
return out

@ -32,6 +32,7 @@ STEP_NUM = 10
def train_static(args, batch_generator):
paddle.enable_static()
paddle.manual_seed(SEED)
paddle.framework.random._manual_program_seed(SEED)
train_prog = fluid.Program()

@ -277,7 +277,8 @@ def load_dygraph(model_path, keep_name_table=False):
To load python2 saved models in python3.
"""
try:
para_dict, opti_dict = fluid.load_dygraph(model_path, keep_name_table)
para_dict, opti_dict = fluid.load_dygraph(
model_path, keep_name_table=keep_name_table)
return para_dict, opti_dict
except UnicodeDecodeError:
warnings.warn(
@ -287,7 +288,7 @@ def load_dygraph(model_path, keep_name_table=False):
if six.PY3:
load_bak = pickle.load
pickle.load = partial(load_bak, encoding="latin1")
para_dict, opti_dict = fluid.load_dygraph(model_path,
keep_name_table)
para_dict, opti_dict = fluid.load_dygraph(
model_path, keep_name_table=keep_name_table)
pickle.load = load_bak
return para_dict, opti_dict

@ -43,15 +43,14 @@ class TestDirectory(unittest.TestCase):
'paddle.distributed.prepare_context', 'paddle.DataParallel',
'paddle.jit', 'paddle.jit.TracedLayer', 'paddle.jit.to_static',
'paddle.jit.ProgramTranslator', 'paddle.jit.TranslatedLayer',
'paddle.jit.save', 'paddle.jit.load', 'paddle.SaveLoadConfig',
'paddle.NoamDecay', 'paddle.PiecewiseDecay',
'paddle.NaturalExpDecay', 'paddle.ExponentialDecay',
'paddle.InverseTimeDecay', 'paddle.PolynomialDecay',
'paddle.CosineDecay', 'paddle.static.Executor',
'paddle.static.global_scope', 'paddle.static.scope_guard',
'paddle.static.append_backward', 'paddle.static.gradients',
'paddle.static.BuildStrategy', 'paddle.static.CompiledProgram',
'paddle.static.ExecutionStrategy',
'paddle.jit.save', 'paddle.jit.load', 'paddle.NoamDecay',
'paddle.PiecewiseDecay', 'paddle.NaturalExpDecay',
'paddle.ExponentialDecay', 'paddle.InverseTimeDecay',
'paddle.PolynomialDecay', 'paddle.CosineDecay',
'paddle.static.Executor', 'paddle.static.global_scope',
'paddle.static.scope_guard', 'paddle.static.append_backward',
'paddle.static.gradients', 'paddle.static.BuildStrategy',
'paddle.static.CompiledProgram', 'paddle.static.ExecutionStrategy',
'paddle.static.default_main_program',
'paddle.static.default_startup_program', 'paddle.static.Program',
'paddle.static.name_scope', 'paddle.static.program_guard',
@ -104,9 +103,7 @@ class TestDirectory(unittest.TestCase):
'paddle.imperative.TracedLayer', 'paddle.imperative.declarative',
'paddle.imperative.ProgramTranslator',
'paddle.imperative.TranslatedLayer', 'paddle.imperative.jit.save',
'paddle.imperative.jit.load',
'paddle.imperative.jit.SaveLoadConfig',
'paddle.imperative.NoamDecay'
'paddle.imperative.jit.load', 'paddle.imperative.NoamDecay'
'paddle.imperative.PiecewiseDecay',
'paddle.imperative.NaturalExpDecay',
'paddle.imperative.ExponentialDecay',

@ -917,11 +917,6 @@ class TestDygraphPtbRnn(unittest.TestCase):
state_dict = emb.state_dict()
fluid.save_dygraph(state_dict, os.path.join('saved_dy', 'emb_dy'))
para_state_dict, opti_state_dict = fluid.load_dygraph(
os.path.join('saved_dy', 'emb_dy'), True)
self.assertTrue(para_state_dict != None)
self.assertTrue(opti_state_dict == None)
para_state_dict, opti_state_dict = fluid.load_dygraph(
os.path.join('saved_dy', 'emb_dy'), keep_name_table=True)
self.assertTrue(para_state_dict != None)

File diff suppressed because it is too large.

@ -63,6 +63,8 @@ class TestLoadStateDictFromSaveInferenceModel(unittest.TestCase):
self.epoch_num = 1
self.batch_size = 128
self.batch_num = 10
# enable static mode
paddle.enable_static()
def train_and_save_model(self, only_params=False):
with new_program_scope():
@ -136,13 +138,12 @@ class TestLoadStateDictFromSaveInferenceModel(unittest.TestCase):
self.params_filename = None
orig_param_dict = self.train_and_save_model()
config = paddle.SaveLoadConfig()
config.separate_params = True
config.model_filename = self.model_filename
load_param_dict, _ = fluid.load_dygraph(self.save_dirname, config)
load_param_dict, _ = fluid.load_dygraph(
self.save_dirname, model_filename=self.model_filename)
self.check_load_state_dict(orig_param_dict, load_param_dict)
new_load_param_dict = paddle.load(self.save_dirname, config)
new_load_param_dict = paddle.load(
self.save_dirname, model_filename=self.model_filename)
self.check_load_state_dict(orig_param_dict, new_load_param_dict)
def test_load_with_param_filename(self):
@ -151,12 +152,12 @@ class TestLoadStateDictFromSaveInferenceModel(unittest.TestCase):
self.params_filename = "static_mnist.params"
orig_param_dict = self.train_and_save_model()
config = paddle.SaveLoadConfig()
config.params_filename = self.params_filename
load_param_dict, _ = fluid.load_dygraph(self.save_dirname, config)
load_param_dict, _ = fluid.load_dygraph(
self.save_dirname, params_filename=self.params_filename)
self.check_load_state_dict(orig_param_dict, load_param_dict)
new_load_param_dict = paddle.load(self.save_dirname, config)
new_load_param_dict = paddle.load(
self.save_dirname, params_filename=self.params_filename)
self.check_load_state_dict(orig_param_dict, new_load_param_dict)
def test_load_with_model_and_param_filename(self):
@ -165,13 +166,16 @@ class TestLoadStateDictFromSaveInferenceModel(unittest.TestCase):
self.params_filename = "static_mnist.params"
orig_param_dict = self.train_and_save_model()
config = paddle.SaveLoadConfig()
config.params_filename = self.params_filename
config.model_filename = self.model_filename
load_param_dict, _ = fluid.load_dygraph(self.save_dirname, config)
load_param_dict, _ = fluid.load_dygraph(
self.save_dirname,
params_filename=self.params_filename,
model_filename=self.model_filename)
self.check_load_state_dict(orig_param_dict, load_param_dict)
new_load_param_dict = paddle.load(self.save_dirname, config)
new_load_param_dict = paddle.load(
self.save_dirname,
params_filename=self.params_filename,
model_filename=self.model_filename)
self.check_load_state_dict(orig_param_dict, new_load_param_dict)
def test_load_state_dict_from_save_params(self):
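
The same keywords flow through the 2.0 entry point as well, as the rewritten assertions above exercise (a sketch; the prefix and file names are hypothetical stand-ins for what `train_and_save_model` writes):

import paddle

state_dict = paddle.load(
    "./static_mnist",                        # 1.x save_inference_model directory
    model_filename="static_mnist.model",     # combined model file (hypothetical name)
    params_filename="static_mnist.params")   # combined params file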

@ -20,8 +20,8 @@ __all__ = [
]
__all__ += [
'grad', 'LayerList', 'load', 'save', 'SaveLoadConfig', 'to_variable',
'no_grad', 'DataParallel'
'grad', 'LayerList', 'load', 'save', 'to_variable', 'no_grad',
'DataParallel'
]
__all__ += [
@ -50,7 +50,6 @@ from ..fluid.dygraph.base import to_variable #DEFINE_ALIAS
from ..fluid.dygraph.base import grad #DEFINE_ALIAS
from .io import save
from .io import load
from ..fluid.dygraph.jit import SaveLoadConfig #DEFINE_ALIAS
from ..fluid.dygraph.parallel import DataParallel #DEFINE_ALIAS
from ..fluid.dygraph.learning_rate_scheduler import NoamDecay #DEFINE_ALIAS

File diff suppressed because it is too large.