!10191 update the example of some operations.

From: @wangshuide2020
Reviewed-by: @liangchenghui, @wuxuejian
Signed-off-by: @liangchenghui
pull/10191/MERGE
Committed by mindspore-ci-bot via Gitee, 4 years ago
commit 2e65c5de5c

@@ -343,6 +343,8 @@ class LSTMCell(Cell):
         >>> c = Tensor(np.ones([1, 3, 12]).astype(np.float32))
         >>> w = Tensor(np.ones([1152, 1, 1]).astype(np.float32))
         >>> output, h, c, _, _ = net(input, h, c, w)
+        >>> print(output.shape)
+        (3, 5, 12)
     """
     def __init__(self,
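The added `print(output.shape)` caps off the LSTMCell docstring example, of which the hunk shows only the tail. A minimal standalone sketch of the full example follows; the constructor arguments are an assumption inferred from the shapes in the diff (the flattened weight of 1152 = 4 x 12 x (10 + 12 + 2) elements is consistent with input_size=10, hidden_size=12, and a biased, unidirectional cell):

import numpy as np
import mindspore.nn as nn
from mindspore import Tensor

# Constructor arguments are inferred from the tensor shapes, not taken from the diff.
net = nn.LSTMCell(10, 12, has_bias=True, batch_first=True, bidirectional=False)
x = Tensor(np.ones([3, 5, 10]).astype(np.float32))    # (batch, seq_len, input_size)
h = Tensor(np.ones([1, 3, 12]).astype(np.float32))    # initial hidden state
c = Tensor(np.ones([1, 3, 12]).astype(np.float32))    # initial cell state
w = Tensor(np.ones([1152, 1, 1]).astype(np.float32))  # flattened LSTM weight
output, h, c, _, _ = net(x, h, c, w)
print(output.shape)  # (3, 5, 12)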

@ -59,10 +59,10 @@ def repeat_elements(x, rep, axis=0):
Repeat elements of a tensor along an axis, like np.repeat. Repeat elements of a tensor along an axis, like np.repeat.
Args: Args:
- **x** (Tensor) - The tensor to repeat values for. Must be of type: float16, x (Tensor): The tensor to repeat values for. Must be of type: float16,
float32, int8, uint8, int16, int32, or int64. float32, int8, uint8, int16, int32, or int64.
- **rep** (int) - The number of times to repeat, must be positive, required. rep (int): The number of times to repeat, must be positive, required.
- **axis** (int) - The axis along which to repeat, default 0. axis (int): The axis along which to repeat, default 0.
Outputs: Outputs:
One tensor with values repeated along the specified axis. If x has shape One tensor with values repeated along the specified axis. If x has shape
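This hunk only restyles the Args section. For reference, a short sketch of what repeat_elements computes, assuming the function is exposed as mindspore.ops.repeat_elements as in later releases:

import numpy as np
import mindspore.ops as ops
from mindspore import Tensor

x = Tensor(np.array([[0, 1, 2], [3, 4, 5]]).astype(np.int32))
# Repeating each element twice along axis 0 turns shape (2, 3) into (4, 3),
# exactly like np.repeat(x, 2, axis=0).
out = ops.repeat_elements(x, rep=2, axis=0)
print(out)
# [[0 1 2]
#  [0 1 2]
#  [3 4 5]
#  [3 4 5]]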

@@ -142,16 +142,19 @@ class AllGather(PrimitiveWithInfer):
         ``Ascend`` ``GPU``

     Examples:
+        >>> # This example should be run with two devices. Refer to the tutorial > Distributed Training on mindspore.cn.
+        >>> import numpy as np
         >>> import mindspore.ops.operations as ops
         >>> import mindspore.nn as nn
         >>> from mindspore.communication import init
-        >>> from mindspore import Tensor
+        >>> from mindspore import Tensor, context
         >>>
+        >>> context.set_context(mode=context.GRAPH_MODE)
         >>> init()
         ... class Net(nn.Cell):
         ...     def __init__(self):
         ...         super(Net, self).__init__()
-        ...         self.allgather = ops.AllGather(group="nccl_world_group")
+        ...         self.allgather = ops.AllGather()
         ...
         ...     def construct(self, x):
         ...         return self.allgather(x)

@@ -160,6 +163,10 @@ class AllGather(PrimitiveWithInfer):
         >>> net = Net()
         >>> output = net(input_)
         >>> print(output)
+        [[1. 1. 1. 1. 1. 1. 1. 1.]
+         [1. 1. 1. 1. 1. 1. 1. 1.]
+         [1. 1. 1. 1. 1. 1. 1. 1.]
+         [1. 1. 1. 1. 1. 1. 1. 1.]]
     """

     @prim_attr_register
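Pieced together, the two AllGather hunks give the example below. It is a sketch only: it needs two devices and a distributed launcher (e.g. mpirun with NCCL on GPU), and the input_ definition is an assumption consistent with the printed 4 x 8 result:

import numpy as np
import mindspore.ops.operations as ops
import mindspore.nn as nn
from mindspore.communication import init
from mindspore import Tensor, context

context.set_context(mode=context.GRAPH_MODE)
init()  # initialize the communication backend (NCCL on GPU, HCCL on Ascend)

class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        # Without an explicit group, AllGather uses the default world group.
        self.allgather = ops.AllGather()

    def construct(self, x):
        return self.allgather(x)

# Assumed input: a (2, 8) tensor of ones on each device, so two devices
# gather into the (4, 8) tensor of ones shown in the diff.
input_ = Tensor(np.ones([2, 8]).astype(np.float32))
net = Net()
output = net(input_)
print(output)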
@@ -255,16 +262,18 @@ class ReduceScatter(PrimitiveWithInfer):
         ValueError: If the first dimension of the input cannot be divided by the rank size.

     Supported Platforms:
-        ``GPU``
+        ``Ascend`` ``GPU``

     Examples:
-        >>> from mindspore import Tensor
+        >>> # This example should be run with two devices. Refer to the tutorial > Distributed Training on mindspore.cn.
+        >>> from mindspore import Tensor, context
         >>> from mindspore.communication import init
         >>> from mindspore.ops.operations.comm_ops import ReduceOp
         >>> import mindspore.nn as nn
         >>> import mindspore.ops.operations as ops
         >>> import numpy as np
         >>>
+        >>> context.set_context(mode=context.GRAPH_MODE)
         >>> init()
         >>> class Net(nn.Cell):
         ...     def __init__(self):

@@ -278,6 +287,10 @@ class ReduceScatter(PrimitiveWithInfer):
         >>> net = Net()
         >>> output = net(input_)
         >>> print(output)
+        [[2. 2. 2. 2. 2. 2. 2. 2.]
+         [2. 2. 2. 2. 2. 2. 2. 2.]
+         [2. 2. 2. 2. 2. 2. 2. 2.]
+         [2. 2. 2. 2. 2. 2. 2. 2.]]
     """

     @prim_attr_register
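Likewise for ReduceScatter, a runnable sketch under the same two-device assumption; the input_ shape is inferred from the printed result (an (8, 8) tensor of ones summed across two devices and scattered into (4, 8) shards of twos):

import numpy as np
import mindspore.nn as nn
import mindspore.ops.operations as ops
from mindspore.communication import init
from mindspore.ops.operations.comm_ops import ReduceOp
from mindspore import Tensor, context

context.set_context(mode=context.GRAPH_MODE)
init()

class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        # Sum across devices, then scatter equal shards along dim 0.
        self.reducescatter = ops.ReduceScatter(ReduceOp.SUM)

    def construct(self, x):
        return self.reducescatter(x)

# Assumed input: (8, 8) ones on each of two devices -> each device
# receives a (4, 8) shard of the elementwise sum, i.e. all twos.
input_ = Tensor(np.ones([8, 8]).astype(np.float32))
net = Net()
output = net(input_)
print(output)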

@@ -34,7 +34,7 @@ class ControlDepend(Primitive):
     This operation does not work in `PYNATIVE_MODE`.

     Args:
         depend_mode (int): Use 0 for a normal dependency relation and 1 for a user-defined dependency relation.
             Default: 0.

     Inputs:
         - **src** (Any) - The source input. It can be a tuple of operations output or a single operation output. We do
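To make depend_mode concrete, a hedged sketch of typical ControlDepend usage, patterned on the operator's own docstring example (ControlDepend was later deprecated in favor of ops.Depend, so treat this as illustrative only):

import numpy as np
import mindspore.nn as nn
import mindspore.ops.operations as ops
from mindspore import Tensor, context

context.set_context(mode=context.GRAPH_MODE)  # ControlDepend does not work in PYNATIVE_MODE

class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        # depend_mode=0: a normal dependency relation between src and dst.
        self.control_depend = ops.ControlDepend(depend_mode=0)
        self.softmax = ops.Softmax()

    def construct(self, x, y):
        mul = x * y
        softmax = self.softmax(x)
        # Ensure the multiply executes before the softmax.
        return self.control_depend(mul, softmax)

x = Tensor(np.ones([4, 5]).astype(np.float32))
y = Tensor(np.ones([4, 5]).astype(np.float32))
output = Net()(x, y)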
@@ -102,7 +102,7 @@ class GeSwitch(PrimitiveWithInfer):
     Examples:
         >>> class Net(nn.Cell):
         ...     def __init__(self):
         ...         super(Net, self).__init__()
         ...         self.square = ops.Square()
         ...         self.add = ops.TensorAdd()
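The GeSwitch hunk shows only the first lines of the example. A sketch of how such a net is typically completed, following the pattern of the operator's docstring; the Less/Merge wiring and the scalar branch value here are assumptions:

import mindspore
import mindspore.nn as nn
import mindspore.ops.operations as ops
from mindspore import Tensor, context

context.set_context(mode=context.GRAPH_MODE)  # GeSwitch is a graph-mode control-flow primitive

class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.square = ops.Square()
        self.add = ops.TensorAdd()
        self.value = Tensor(5.0, mindspore.float32)
        self.switch = ops.GeSwitch()
        self.merge = ops.Merge()
        self.less = ops.Less()

    def construct(self, x, y):
        cond = self.less(x, y)
        # GeSwitch routes each input to its true or false output branch.
        st1, sf1 = self.switch(x, cond)
        st2, sf2 = self.switch(y, cond)
        add_ret = self.add(st1, st2)         # taken when cond is true
        st3, sf3 = self.switch(self.value, cond)
        sq_ret = self.square(sf3)            # taken when cond is false
        # Merge returns whichever branch actually produced a value.
        ret = self.merge((add_ret, sq_ret))
        return ret[0]

x = Tensor(1.0, mindspore.float32)
y = Tensor(2.0, mindspore.float32)
output = Net()(x, y)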

@@ -350,7 +350,12 @@ class Print(PrimitiveWithInfer):
         >>> x = Tensor(np.ones([2, 1]).astype(np.int32))
         >>> y = Tensor(np.ones([2, 2]).astype(np.int32))
        >>> net = PrintDemo()
-        >>> output = net(x, y)
+        >>> result = net(x, y)
+        Print Tensor x and Tensor y:
+        [[1]
+         [1]]
+        [[1 1]
+         [1 1]]
     """

     @prim_attr_register
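Filled out, the Print hunk corresponds to an example like the sketch below; ops.Print emits to stdout during graph execution, which is why the printed tensors appear after the call in the docstring. The PrintDemo body is reconstructed, not shown in the diff:

import numpy as np
import mindspore.nn as nn
import mindspore.ops.operations as ops
from mindspore import Tensor, context

context.set_context(mode=context.GRAPH_MODE)  # Print only works in graph mode

class PrintDemo(nn.Cell):
    def __init__(self):
        super(PrintDemo, self).__init__()
        self.print = ops.Print()

    def construct(self, x, y):
        # Prints the label string and both tensors to stdout at run time.
        self.print('Print Tensor x and Tensor y:', x, y)
        return x

x = Tensor(np.ones([2, 1]).astype(np.int32))
y = Tensor(np.ones([2, 2]).astype(np.int32))
net = PrintDemo()
result = net(x, y)
# Print Tensor x and Tensor y:
# [[1]
#  [1]]
# [[1 1]
#  [1 1]]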
