diff --git a/mindspore/common/api.py b/mindspore/common/api.py
index 0d7578fd5e..8443ef6a46 100644
--- a/mindspore/common/api.py
+++ b/mindspore/common/api.py
@@ -232,6 +232,8 @@ def ms_function(fn=None, obj=None, input_signature=None):
         equal to the case when `fn` is not None.
 
     Examples:
+        >>> from mindspore.ops import functional as F
+        >>>
         >>> def tensor_add(x, y):
         >>>     z = F.tensor_add(x, y)
         >>>     return z
diff --git a/mindspore/common/seed.py b/mindspore/common/seed.py
index fd17cd5b35..bd797d69e5 100644
--- a/mindspore/common/seed.py
+++ b/mindspore/common/seed.py
@@ -58,6 +58,8 @@ def set_seed(seed):
         TypeError: If seed isn't a int.
 
     Examples:
+        >>> from mindspore.ops import composite as C
+        >>>
         >>> # 1. If global seed is not set, numpy.random and initializer will choose a random seed:
         >>> np_1 = np.random.normal(0, 1, [1]).astype(np.float32)  # A1
         >>> np_1 = np.random.normal(0, 1, [1]).astype(np.float32)  # A2
diff --git a/mindspore/train/model.py b/mindspore/train/model.py
index 63f3b73ddc..59f39095f0 100755
--- a/mindspore/train/model.py
+++ b/mindspore/train/model.py
@@ -105,10 +105,11 @@ class Model:
         >>>         return out
         >>>
         >>> net = Net()
-        >>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
+        >>> loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
         >>> optim = Momentum(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
         >>> model = Model(net, loss_fn=loss, optimizer=optim, metrics=None)
-        >>> dataset = get_dataset()
+        >>> # For details about how to build the dataset, please refer to the tutorial document on the official website.
+        >>> dataset = create_custom_dataset()
         >>> model.train(2, dataset)
     """
 
@@ -514,9 +515,6 @@ class Model:
            When setting pynative mode or CPU, the training process will be performed with dataset not sink.
 
         Note:
-            If dataset_sink_mode is True, epoch of training should be equal to the count of repeat
-            operation in dataset processing. Otherwise, errors could occur since the amount of data
-            is not equal to the required amount of training .
             If dataset_sink_mode is True, data will be sent to device. If device is Ascend, features
             of data will be transferred one by one. The limitation of data transmission per time is 256M.
             If sink_size > 0, each epoch the dataset can be traversed unlimited times until you get sink_size
@@ -541,7 +539,7 @@ class Model:
                 If dataset_sink_mode is False, set sink_size as invalid. Default: -1.
 
         Examples:
-            >>> dataset = get_dataset()
+            >>> dataset = create_custom_dataset()
             >>> net = Net()
             >>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
             >>> loss_scale_manager = FixedLossScaleManager()
@@ -659,7 +657,7 @@ class Model:
             Dict, which returns the loss value and metrics values for the model in the test mode.
 
         Examples:
-            >>> dataset = get_dataset()
+            >>> dataset = create_custom_dataset()
             >>> net = Net()
             >>> loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
             >>> model = Model(net, loss_fn=loss, optimizer=None, metrics={'acc'})