diff --git a/mindspore/nn/graph_kernels/graph_kernels.py b/mindspore/nn/graph_kernels/graph_kernels.py
index 93e63dcd79..9d2e88d3b1 100644
--- a/mindspore/nn/graph_kernels/graph_kernels.py
+++ b/mindspore/nn/graph_kernels/graph_kernels.py
@@ -366,7 +366,8 @@ class ReLU(GraphKernel):
         >>> relu = ReLU()
         >>> result = relu(input_x)
         >>> print(result)
-        [[0, 4.0, 0.0], [2.0, 0.0, 9.0]]
+        [[0. 4. 0.]
+         [2. 0. 9.]]
     """
     def __init__(self):
         super(ReLU, self).__init__()
@@ -685,7 +686,7 @@ class LogSoftmax(GraphKernel):
         >>> log_softmax = LogSoftmax()
         >>> result = log_softmax(input_x)
         >>> print(result)
-        [-4.4519143, -3.4519143, -2.4519143, -1.4519144, -0.4519144]
+        [-4.4519143 -3.4519143 -2.4519143 -1.4519144 -0.4519144]
     """
 
     def __init__(self, axis=-1):
@@ -743,7 +744,7 @@ class Tanh(GraphKernel):
         >>> tanh = Tanh()
         >>> result = tanh(input_x)
         >>> print(result)
-        [0.7615941, 0.9640276, 0.9950548, 0.9993293, 0.99990916]
+        [0.7615941 0.9640276 0.9950548 0.9993293 0.99990916]
     """
     def __init__(self):
         super(Tanh, self).__init__()
diff --git a/mindspore/nn/layer/activation.py b/mindspore/nn/layer/activation.py
index 60d80b0503..97bf36db28 100644
--- a/mindspore/nn/layer/activation.py
+++ b/mindspore/nn/layer/activation.py
@@ -264,7 +264,7 @@ class LeakyReLU(Cell):
         >>> output = leaky_relu(input_x)
         >>> print(output)
         [[-0.2 4. -1.6]
-        [ 2 -1. 9. ]]
+        [ 2. -1. 9. ]]
     """
 
     def __init__(self, alpha=0.2):
diff --git a/mindspore/nn/layer/basic.py b/mindspore/nn/layer/basic.py
index 996e5513d9..50a94fa558 100644
--- a/mindspore/nn/layer/basic.py
+++ b/mindspore/nn/layer/basic.py
@@ -748,8 +748,8 @@ class Triu(Cell):
         >>> triu = nn.Triu()
         >>> result = triu(x)
         >>> print(result)
-        [[1 2]
-         [0 4]]
+        [[1 0]
+         [3 4]]
     """
     def __init__(self):
         super(Triu, self).__init__()
@@ -796,8 +796,8 @@ class MatrixDiag(Cell):
         >>> matrix_diag = nn.MatrixDiag()
         >>> output = matrix_diag(x)
         >>> print(output)
-        [[1. 0.]
-         [0. -1.]]
+        [[ 1. 0.]
+         [ 0. -1.]]
     """
     def __init__(self):
         super(MatrixDiag, self).__init__()
diff --git a/mindspore/nn/layer/image.py b/mindspore/nn/layer/image.py
index 3a488d69c6..7390c3cce6 100644
--- a/mindspore/nn/layer/image.py
+++ b/mindspore/nn/layer/image.py
@@ -398,7 +398,7 @@ class PSNR(Cell):
         >>> img2 = Tensor(np.random.random((1,3,16,16)))
         >>> output = net(img1, img2)
         >>> print(output)
-        [7.7229595]
+        [7.915369]
     """
     def __init__(self, max_val=1.0):
         super(PSNR, self).__init__()
diff --git a/mindspore/nn/layer/pooling.py b/mindspore/nn/layer/pooling.py
index c28330e7de..018d7d194b 100644
--- a/mindspore/nn/layer/pooling.py
+++ b/mindspore/nn/layer/pooling.py
@@ -182,7 +182,7 @@ class MaxPool1d(_PoolNd):
         >>> x = Tensor(np.random.randint(0, 10, [1, 2, 4]), mindspore.float32)
         >>> output = max_pool(x)
         >>> result = output.shape
-        >>> printI(result)
+        >>> print(result)
         (1, 2, 2)
     """
 
diff --git a/mindspore/nn/learning_rate_schedule.py b/mindspore/nn/learning_rate_schedule.py
index 9f7cf77834..bad917dbac 100644
--- a/mindspore/nn/learning_rate_schedule.py
+++ b/mindspore/nn/learning_rate_schedule.py
@@ -148,7 +148,7 @@ class NaturalExpDecayLR(LearningRateSchedule):
         >>> natural_exp_decay_lr = NaturalExpDecayLR(learning_rate, decay_rate, decay_steps, True)
         >>> result = natural_exp_decay_lr(global_step)
         >>> print(result)
-        0.016529894
+        0.1
     """
     def __init__(self, learning_rate, decay_rate, decay_steps, is_stair=False):
         super(NaturalExpDecayLR, self).__init__()
diff --git a/mindspore/nn/loss/loss.py b/mindspore/nn/loss/loss.py
index 2ebe54d475..5201cf7eba 100644
--- a/mindspore/nn/loss/loss.py
+++ b/mindspore/nn/loss/loss.py
@@ -599,7 +599,7 @@ class CosineEmbeddingLoss(_Loss):
         >>> cosine_embedding_loss = nn.CosineEmbeddingLoss()
         >>> output = cosine_embedding_loss(x1, x2, y)
         >>> print(output)
-        [0.0003426075]
+        0.0003426075
     """
     def __init__(self, margin=0.0, reduction="mean"):
         super(CosineEmbeddingLoss, self).__init__(reduction)
diff --git a/mindspore/nn/metrics/accuracy.py b/mindspore/nn/metrics/accuracy.py
index 49ab7a5c9e..46652759ec 100644
--- a/mindspore/nn/metrics/accuracy.py
+++ b/mindspore/nn/metrics/accuracy.py
@@ -42,7 +42,7 @@ class Accuracy(EvaluationBase):
         >>> metric.update(x, y)
         >>> accuracy = metric.eval()
         >>> print(accuracy)
-        0.66666666
+        0.6666666666666666
     """
     def __init__(self, eval_type='classification'):
         super(Accuracy, self).__init__(eval_type)
diff --git a/mindspore/ops/composite/random_ops.py b/mindspore/ops/composite/random_ops.py
index b48e9aea25..2a97467e5b 100644
--- a/mindspore/ops/composite/random_ops.py
+++ b/mindspore/ops/composite/random_ops.py
@@ -51,8 +51,8 @@ def normal(shape, mean, stddev, seed=None):
         >>> stddev = Tensor(1.0, mstype.float32)
         >>> output = C.normal(shape, mean, stddev, seed=5)
         >>> print(output)
-        [[1.0996436 0.44371283 0.11127508 -0.48055804]
-         [0.31989878 -1.0644426 1.5076542 1.2290289 ]]
+        [[ 1.0996436 0.44371283 0.11127508 -0.48055804]
+         [ 0.31989878 -1.0644426 1.5076542 1.2290289 ]]
     """
     mean_dtype = F.dtype(mean)
     stddev_dtype = F.dtype(stddev)
diff --git a/mindspore/ops/operations/debug_ops.py b/mindspore/ops/operations/debug_ops.py
index f35f905232..f10291df51 100644
--- a/mindspore/ops/operations/debug_ops.py
+++ b/mindspore/ops/operations/debug_ops.py
@@ -63,7 +63,7 @@ class ScalarSummary(PrimitiveWithInfer):
         ...         self.summary(name, x)
         ...         x = self.add(x, y)
         ...         return x
-        ... 
+        ...
     """
 
     @prim_attr_register
diff --git a/mindspore/ops/operations/math_ops.py b/mindspore/ops/operations/math_ops.py
index fe8d85fd0f..3840d6c361 100644
--- a/mindspore/ops/operations/math_ops.py
+++ b/mindspore/ops/operations/math_ops.py
@@ -613,7 +613,7 @@ class ReduceProd(_Reduce):
         >>> input_x = Tensor(np.random.randn(3, 4, 5, 6).astype(np.float32))
         >>> op = ops.ReduceProd(keep_dims=True)
         >>> output = op(input_x, 1)
-        >>> reuslt = output.shape
+        >>> result = output.shape
         >>> print(result)
         (3, 1, 5, 6)
     """
@@ -2513,8 +2513,9 @@ class Equal(_LogicBinaryOp):
     Examples:
         >>> input_x = Tensor(np.array([1, 2, 3]), mindspore.float32)
         >>> equal = ops.Equal()
-        >>> equal(input_x, 2.0)
-        [False, True, False]
+        >>> output = equal(input_x, 2.0)
+        >>> print(output)
+        Tensor(shape=[3], dtype=Bool, value= [False, True, False])
         >>>
         >>> input_x = Tensor(np.array([1, 2, 3]), mindspore.int32)
         >>> input_y = Tensor(np.array([1, 2, 4]), mindspore.int32)
diff --git a/mindspore/ops/operations/nn_ops.py b/mindspore/ops/operations/nn_ops.py
index 587499e9fb..f59cec67a2 100644
--- a/mindspore/ops/operations/nn_ops.py
+++ b/mindspore/ops/operations/nn_ops.py
@@ -6124,7 +6124,7 @@ class CTCGreedyDecoder(PrimitiveWithInfer):
             containing sequence log-probability, has the same type as `inputs`.
 
     Examples:
-        >>>class CTCGreedyDecoderNet(nn.Cell):
+        >>> class CTCGreedyDecoderNet(nn.Cell):
         ...     def __init__(self):
         ...         super(CTCGreedyDecoderNet, self).__init__()
         ...         self.ctc_greedy_decoder = P.CTCGreedyDecoder()
diff --git a/mindspore/ops/operations/other_ops.py b/mindspore/ops/operations/other_ops.py
index 88dff291bb..d4f64c269b 100644
--- a/mindspore/ops/operations/other_ops.py
+++ b/mindspore/ops/operations/other_ops.py
@@ -318,9 +318,9 @@ class IOU(PrimitiveWithInfer):
         >>> gt_boxes = Tensor(np.random.randint(1.0, 5.0, [3, 4]), mindspore.float16)
         >>> output = iou(anchor_boxes, gt_boxes)
         >>> print(output)
-        [[65000. 65500. -0.]
-         [65000. 65500. -0.]
-         [ 0. 0. 0.]]
+        [[65500. 65500. 65500.]
+         [ -0. -0. -0.]
+         [ -0. -0. -0.]]
     """
 
 
@@ -524,7 +524,7 @@ class ConfusionMatrix(PrimitiveWithInfer):
         >>> predictions = Tensor([1, 2, 1, 3], mindspore.int32)
         >>> output = confusion_matrix(labels, predictions)
         >>> print(output)
-        [[0 1 0 0
+        [[0 1 0 0]
          [0 1 1 0]
          [0 0 0 0]
         [0 0 0 1]]
diff --git a/mindspore/ops/operations/random_ops.py b/mindspore/ops/operations/random_ops.py
index 5c7070c082..a1db4903c5 100644
--- a/mindspore/ops/operations/random_ops.py
+++ b/mindspore/ops/operations/random_ops.py
@@ -420,7 +420,7 @@ class RandomChoiceWithMask(PrimitiveWithInfer):
         >>> print(result)
         (256, 2)
         >>> result = output_mask.shape
-        >>> print(reuslt)
+        >>> print(result)
         (256,)
     """
 
@@ -474,16 +474,16 @@ class RandomCategorical(PrimitiveWithInfer):
         >>> net = Net(8)
         >>> output = net(Tensor(x))
         >>> print(output)
-        [[0 2 1 3 4 2 0 2]
-         [0 2 1 3 4 2 0 2]
-         [0 2 1 3 4 2 0 2]
+        [[0 2 0 3 4 2 0 2]
          [0 2 1 3 4 2 0 2]
          [0 2 0 3 4 2 0 2]
-         [0 2 1 3 4 3 0 3]
          [0 2 1 3 4 2 0 2]
          [0 2 1 3 4 2 0 2]
          [0 2 1 3 4 2 0 2]
-         [0 2 0 3 4 2 0 2]]
+         [0 2 0 3 4 2 0 2]
+         [0 2 0 3 4 2 0 2]
+         [0 2 1 3 4 3 0 3]
+         [0 2 1 3 4 2 0 2]]
     """
 
     @prim_attr_register