diff --git a/mindspore/nn/layer/lstm.py b/mindspore/nn/layer/lstm.py
index 06ad5154ab..86d0e8e44a 100755
--- a/mindspore/nn/layer/lstm.py
+++ b/mindspore/nn/layer/lstm.py
@@ -63,7 +63,7 @@ class LSTM(Cell):
         num_layers (int): Number of layers of stacked LSTM . Default: 1.
         has_bias (bool): Specifies whether has bias `b_ih` and `b_hh`. Default: True.
         batch_first (bool): Specifies whether the first dimension of input is batch_size. Default: False.
-        dropout (float): If not 0, append `Dropout` layer on the outputs of each
+        dropout (float, int): If not 0, append `Dropout` layer on the outputs of each
             LSTM layer except the last layer. Default 0. The range of dropout is [0.0, 1.0].
         bidirectional (bool): Specifies whether this is a bidirectional LSTM. If set True,
             number of directions will be 2 otherwise number of directions is 1. Default: False.
diff --git a/mindspore/ops/operations/control_ops.py b/mindspore/ops/operations/control_ops.py
index 3f8d50857a..736855a046 100644
--- a/mindspore/ops/operations/control_ops.py
+++ b/mindspore/ops/operations/control_ops.py
@@ -154,14 +154,6 @@ class Merge(PrimitiveWithInfer):
         raise NotImplementedError
 
     def infer_shape(self, inputs):
-        validator.check_integer('inputs len', len(inputs), 0, Rel.GT, self.name)
-        input_0 = inputs[0]
-
-        for i in range(1, len(inputs)):
-            if inputs[i] != input_0:
-                raise ValueError(f"For \'{self.name}\', the shape of {i}th input should be same as "
-                                 f"first input {input_0}, but got {inputs[i]}.")
-
         return (inputs[0], [1])
 
     def infer_dtype(self, inputs):
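
Illustrative usage (a minimal sketch, not part of the patch): the lstm.py docstring change above documents that `dropout` in `mindspore.nn.LSTM` may be given as an int such as 0 as well as a float in [0.0, 1.0]. The constructor arguments below follow the parameters listed in that docstring; the input and state shapes are assumptions for the example only.

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor

    # dropout=0 (an int) is now a documented value; dropout=0.5 (a float) also works.
    net = nn.LSTM(input_size=10, hidden_size=16, num_layers=2, has_bias=True,
                  batch_first=False, dropout=0, bidirectional=False)

    x = Tensor(np.ones((5, 3, 10)).astype(np.float32))    # (seq_len, batch_size, input_size)
    h0 = Tensor(np.zeros((2, 3, 16)).astype(np.float32))  # (num_layers * num_directions, batch_size, hidden_size)
    c0 = Tensor(np.zeros((2, 3, 16)).astype(np.float32))
    output, (hn, cn) = net(x, (h0, c0))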