!13110 update comments

From: @yepei6
Reviewed-by: @zh_qh
Signed-off-by: @zh_qh
pull/13110/MERGE
mindspore-ci-bot 4 years ago committed by Gitee
commit d6ac315c85

@@ -43,7 +43,7 @@ class Tensor(Tensor_):
output tensor will be as same as the `input_data`. Default: None.
shape (Union[tuple, list, int]): A list of integers, a tuple of integers or an integer as the shape of
output. Default: None.
init (class:'Initializer'): the information of init data.
init (:class:`Initializer`): the information of init data.
'init' is used for delayed initialization in parallel mode. Usually, it is not recommended to
use 'init' interface to initialize parameters in other conditions. If 'init' interface is used
to initialize parameters, the `init_data` API need to be called to convert `Tensor` to the actual data.

@@ -90,7 +90,7 @@ class _MultiCompatibleRotatingFileHandler(RotatingFileHandler):
self.stream.close()
self.stream = None
# Attain an exclusive lock with bloking mode by `fcntl` module.
# Attain an exclusive lock with blocking mode by `fcntl` module.
with open(self.baseFilename, 'a') as file_pointer:
if platform.system() != "Windows":
fcntl.lockf(file_pointer.fileno(), fcntl.LOCK_EX)
@@ -367,7 +367,7 @@ def get_log_config():
>>> import os
>>> os.environ['GLOG_v'] = '1'
>>> os.environ['GLOG_logtostderr'] = '0'
>>> os.environ['GLOG_log_dir'] = '/var/log/mindspore'
>>> os.environ['GLOG_log_dir'] = '/var/log'
>>> os.environ['logger_maxBytes'] = '5242880'
>>> os.environ['logger_backupCount'] = '10'
>>> from mindspore import log as logger
@@ -430,13 +430,13 @@ def _find_caller(stack_info=False, stacklevel=1):
def _get_stack_info(frame):
"""
Get the stack informations.
Get the stack information.
Args:
frame(frame): the frame requiring informations.
frame(frame): the frame requiring information.
Returns:
str, the string of the stack informations.
str, the string of the stack information.
"""
sinfo = None
stack_prefix = 'Stack (most recent call last):\n'

@@ -125,8 +125,8 @@ def connect_network_with_dataset(network, dataset_helper):
return network
if not hasattr(dataset, '__me_inited__') and (context.get_context("device_target") == "Ascend" or \
context.get_context("device_target") == "GPU") and not context.get_context("enable_ge"):
if not hasattr(dataset, '__me_inited__') and context.get_context("device_target") in ("Ascend", "GPU")\
and not context.get_context("enable_ge"):
dataset.__me_inited__ = True
dataset_types, dataset_shapes = dataset_helper.types_shapes()

@@ -812,6 +812,7 @@ class Model:
>>> import numpy as np
>>> import mindspore as ms
>>> from mindspore import Model, context, Tensor
>>> from mindspore.context import ParallelMode
>>>
>>> context.set_context(mode=context.GRAPH_MODE)
>>> context.set_auto_parallel_context(full_batch=True, parallel_mode=ParallelMode.SEMI_AUTO_PARALLEL)

Loading…
Cancel
Save