Add labels to Python files

pull/8950/head
JunYuLiu 4 years ago
parent fd9cd7afcd
commit 686fd88089

@@ -233,25 +233,25 @@ def ms_function(fn=None, obj=None, input_signature=None):
     Examples:
         >>> from mindspore.ops import functional as F
-        >>>
+        ...
         >>> def tensor_add(x, y):
-        >>>     z = F.tensor_add(x, y)
-        >>>     return z
-        >>>
+        ...     z = F.tensor_add(x, y)
+        ...     return z
+        ...
         >>> @ms_function
-        >>> def tensor_add_with_dec(x, y):
-        >>>     z = F.tensor_add(x, y)
-        >>>     return z
-        >>>
+        ... def tensor_add_with_dec(x, y):
+        ...     z = F.tensor_add(x, y)
+        ...     return z
+        ...
         >>> @ms_function(input_signature=(MetaTensor(mindspore.float32, (1, 1, 3, 3)),
-        >>>                               MetaTensor(mindspore.float32, (1, 1, 3, 3))))
-        >>> def tensor_add_with_sig(x, y):
-        >>>     z = F.tensor_add(x, y)
-        >>>     return z
-        >>>
+        ...                               MetaTensor(mindspore.float32, (1, 1, 3, 3))))
+        ... def tensor_add_with_sig(x, y):
+        ...     z = F.tensor_add(x, y)
+        ...     return z
+        ...
         >>> x = Tensor(np.ones([1, 1, 3, 3]).astype(np.float32))
         >>> y = Tensor(np.ones([1, 1, 3, 3]).astype(np.float32))
-        >>>
+        ...
         >>> tensor_add_graph = ms_function(fn=tensor_add)
         >>> out = tensor_add_graph(x, y)
         >>> out = tensor_add_with_dec(x, y)
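For context on the ">>>" to "..." change in this hunk: Python's doctest only treats "..." as a continuation of the preceding ">>>" statement, so prefixing every line with ">>>" splits a compound statement into separate, syntactically broken examples. A minimal standalone sketch, not part of the commit, that exercises the corrected style:

    import doctest

    def add_one(x):
        """Docstring written in the corrected continuation style.

        >>> def wrapped(v):
        ...     return v + 1
        ...
        >>> wrapped(2)
        3
        """
        return x + 1

    if __name__ == "__main__":
        # Runs the doctest above; prints TestResults(failed=0, attempted=2) when it passes.
        print(doctest.testmod())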

@@ -51,7 +51,7 @@ class Tensor(Tensor_):
         >>> assert isinstance(t1, Tensor)
         >>> assert t1.shape == (1, 2, 3)
         >>> assert t1.dtype == mindspore.float32
-        >>>
+        ...
         >>> # initialize a tensor with a float scalar
         >>> t2 = Tensor(0.1)
         >>> assert isinstance(t2, Tensor)
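The asserts above come straight from the Tensor docstring; a short self-contained sketch of the two construction paths it covers (the np.zeros input and explicit dtype for t1 are assumptions, since the construction line sits just above this hunk):

    import numpy as np
    import mindspore
    from mindspore import Tensor

    # Tensor from an array-like input with an explicit dtype (assumed constructor form).
    t1 = Tensor(np.zeros([1, 2, 3]), mindspore.float32)
    assert isinstance(t1, Tensor)
    assert t1.shape == (1, 2, 3)
    assert t1.dtype == mindspore.float32

    # Tensor from a float scalar; the dtype is inferred.
    t2 = Tensor(0.1)
    assert isinstance(t2, Tensor)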

@@ -4541,6 +4541,9 @@ class GatherD(PrimitiveWithInfer):
     Outputs:
         Tensor, the shape of tensor is :math:`(z_1, z_2, ..., z_N)`.
 
+    Supported Platforms:
+        ``Ascend`` ``GPU``
+
     Examples:
         >>> x = Tensor(np.array([[1, 2], [3, 4]]), mindspore.int32)
         >>> index = Tensor(np.array([[0, 0], [1, 0]]), mindspore.int32)
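The hunk shows only the example's inputs; a minimal sketch of how GatherD is typically applied to them (the dim value and the printed result are illustrative assumptions, not part of the diff):

    import numpy as np
    import mindspore
    from mindspore import Tensor
    from mindspore.ops import operations as P

    x = Tensor(np.array([[1, 2], [3, 4]]), mindspore.int32)
    index = Tensor(np.array([[0, 0], [1, 0]]), mindspore.int32)

    # Gather along dim 1: output[i][j] = x[i][index[i][j]].
    dim = 1
    output = P.GatherD()(x, dim, index)
    print(output)
    # [[1 1]
    #  [4 3]]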

@@ -3599,6 +3599,9 @@ class AdamNoUpdateParam(PrimitiveWithInfer):
         Tensor, whose shape and data type are the same with `gradient`, is a value that should be added to the
         parameter to be updated.
 
+    Supported Platforms:
+        ``CPU``
+
     Examples:
         >>> import numpy as np
         >>> import mindspore as ms
@@ -4064,6 +4067,9 @@ class FusedSparseProximalAdagrad(PrimitiveWithInfer):
         - **var** (Tensor) - A Tensor with shape (1,).
         - **accum** (Tensor) - A Tensor with shape (1,).
 
+    Supported Platforms:
+        ``CPU``
+
     Examples:
         >>> import numpy as np
         >>> import mindspore.nn as nn
@@ -4958,6 +4964,9 @@ class ApplyProximalAdagrad(PrimitiveWithInfer):
         - **var** (Tensor) - The same shape and data type as `var`.
         - **accum** (Tensor) - The same shape and data type as `accum`.
 
+    Supported Platforms:
+        ``Ascend``
+
     Examples:
         >>> import numpy as np
        >>> import mindspore.nn as nn
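The last four hunks all add the same kind of section, so the docstring layout this commit standardizes looks roughly like the following (the class and its contents are a hypothetical illustration, not one of the operators touched here):

    # Hypothetical class, used only to show the docstring section order added by this commit.
    class ExampleOp:
        """
        Illustrative operator docstring layout.

        Outputs:
            Tensor, with the same shape as the input.

        Supported Platforms:
            ``Ascend`` ``GPU`` ``CPU``

        Examples:
            >>> # usage example goes here, after the platform labels
        """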
