remove is_mem_optimized in Program, test=develop (#19307)

Zeng Jinle authored 6 years ago, committed by GitHub
parent 97d1db1874
commit 561232c25a

@@ -2854,10 +2854,6 @@ class Program(object):
         self._use_hierarchical_allreduce = False
         self._hierarchical_allreduce_inter_nranks = 0
-        # @deprecated(the python memory optimize transpiler is deprecated)
-        # whether the program is optimized by memory_optimize_transpiler
-        self.__is_mem_optimized = False
         # if this program has been optimized by distributed optimizer
         # fleet_opt will be given a value
         self._fleet_opt = None
@@ -2869,16 +2865,6 @@ class Program(object):
         # appending gradients times
         self._appending_grad_times = 0
-    @property
-    def _is_mem_optimized(self):
-        # if the program is optimized, operator input/outputs
-        # maybe same, which conflict with save_inference_model.
-        return self.__is_mem_optimized
-    @_is_mem_optimized.setter
-    def _is_mem_optimized(self, target):
-        self.__is_mem_optimized = target
     @property
     def _op_role(self):
         """

@@ -1028,13 +1028,6 @@ def save_inference_model(dirname,
     if main_program is None:
         main_program = default_main_program()
-    if main_program._is_mem_optimized:
-        warnings.warn(
-            "save_inference_model must put before you call memory_optimize. \
-            the memory_optimize will modify the original program, \
-            is not suitable for saving inference model \
-            we save the original program as inference model.",
-            RuntimeWarning)
     elif not isinstance(main_program, Program):
         raise TypeError("program should be as Program type or None")
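With the `_is_mem_optimized` check removed, `save_inference_model` only validates that `main_program` is a Program (or None, which falls back to the default program). A minimal usage sketch follows, assuming a PaddlePaddle 1.x fluid environment; the toy network, variable names, and output directory are placeholders, not part of this commit.

    import paddle.fluid as fluid

    # Toy network: a single fully connected layer over a 784-d input.
    image = fluid.layers.data(name='image', shape=[784], dtype='float32')
    prediction = fluid.layers.fc(input=image, size=10, act='softmax')

    exe = fluid.Executor(fluid.CPUPlace())
    exe.run(fluid.default_startup_program())

    # No memory_optimize-related warning is emitted anymore; passing
    # main_program=None selects default_main_program().
    fluid.io.save_inference_model(
        dirname='./inference_model',     # placeholder output directory
        feeded_var_names=['image'],
        target_vars=[prediction],
        executor=exe,
        main_program=None)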
