[Dy2Stat] Fix ProgramTranslator.save_inference_model API Doc (#24584)

As the title.
v1.8
Huihuang Zheng 5 years ago committed by GitHub
parent c4dd596de6
commit 5ff4535781
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -559,14 +559,14 @@ class ProgramTranslator(object):
Before:
    Args:
        dirname (str): the directory to save the inference model.
        feed (list[int], optional): the input variable indices of the saved
            inference model. If None, all input variables of the
            ProgramTranslator would be the inputs of the saved inference
            model. Default None.
        fetch (list[int], optional): the output variable indices of the
            saved inference model. If None, all output variables of the
            TracedLayer object would be the outputs of the saved inference
            model. Default None.
    Returns:
        None
    Examples:

After:
    Args:
        dirname (str): the directory to save the inference model.
        feed (list[int], optional): the indices of the input variables of the
            dygraph functions which will be saved as input variables in
            inference model. If None, all input variables of the dygraph function
            would be the inputs of the saved inference model. Default None.
        fetch (list[int], optional): the indices of the returned variable of the
            dygraph functions which will be saved as output variables in
            inference model. If None, all output variables of the dygraph function
            would be the outputs of the saved inference model. Default None.
    Returns:
        None
    Examples:
@ -599,12 +599,12 @@ class ProgramTranslator(object):
Before:
    adam.minimize(loss)
    net.clear_gradients()
    # Save inference model.
    # Note that fetch=[0] means we set 'y' as the inference output.
    prog_trans = ProgramTranslator()
    prog_trans.save_inference_model("./dy2stat_infer_model", fetch=[0])
    # In this example, the inference model will be pruned based on input (x) and
    # output (y). The pruned inference program is going to be saved in the folder
    # "./dy2stat_infer_model" and parameters are going to be saved in separate
    # files in the folder.
    """

After:
    adam.minimize(loss)
    net.clear_gradients()
    # Save inference model.
    # Note that fetch=[0] means we set 'z' as the inference output.
    prog_trans = ProgramTranslator()
    prog_trans.save_inference_model("./dy2stat_infer_model", fetch=[0])
    # In this example, the inference model will be pruned based on output (z).
    # The pruned inference program is going to be saved in the folder
    # "./dy2stat_infer_model" and parameters are going to be saved in separate
    # files in the folder.
    """

Loading…
Cancel
Save