!1258 fixed comment of nn.LSTM

Merge pull request !1258 from jiangjinsheng/issue_lstm_dropout
pull/1258/MERGE
mindspore-ci-bot committed 5 years ago (via Gitee)
commit 2fc8286d23

@@ -63,7 +63,7 @@ class LSTM(Cell):
num_layers (int): Number of layers of stacked LSTM . Default: 1.
has_bias (bool): Specifies whether has bias `b_ih` and `b_hh`. Default: True.
batch_first (bool): Specifies whether the first dimension of input is batch_size. Default: False.
- dropout (float): If not 0, append `Dropout` layer on the outputs of each
+ dropout (float, int): If not 0, append `Dropout` layer on the outputs of each
LSTM layer except the last layer. Default 0. The range of dropout is [0.0, 1.0].
bidirectional (bool): Specifies whether this is a bidirectional LSTM. If set True,
number of directions will be 2 otherwise number of directions is 1. Default: False.
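
For reference, a minimal usage sketch matching the updated docstring. The layer sizes, shapes, and zero-initialized states below are illustrative assumptions, not part of this change; only the parameter names and the shape convention (`batch_first=True` means input of shape `(batch, seq_len, input_size)`) come from the docstring above. The point of the doc fix is that `dropout` may be passed as an int such as `0` as well as a float in `[0.0, 1.0]`.

```python
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor

# Illustrative sizes; only the parameter names and shape convention
# come from the docstring above.
net = nn.LSTM(input_size=10, hidden_size=16, num_layers=2,
              has_bias=True, batch_first=True,
              dropout=0.5,  # an int such as 0 is also accepted
              bidirectional=False)

x = Tensor(np.zeros([3, 5, 10]).astype(np.float32))       # (batch, seq_len, input_size)
h0 = Tensor(np.zeros([1 * 2, 3, 16]).astype(np.float32))  # (num_directions * num_layers, batch, hidden_size)
c0 = Tensor(np.zeros([1 * 2, 3, 16]).astype(np.float32))

output, (hn, cn) = net(x, (h0, c0))
```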

@@ -154,14 +154,6 @@ class Merge(PrimitiveWithInfer):
        raise NotImplementedError
    def infer_shape(self, inputs):
        validator.check_integer('inputs len', len(inputs), 0, Rel.GT, self.name)
        input_0 = inputs[0]
        for i in range(1, len(inputs)):
            if inputs[i] != input_0:
                raise ValueError(f"For \'{self.name}\', the shape of {i}th input should be same as "
                                 f"first input {input_0}, but got {inputs[i]}.")
        return (inputs[0], [1])
    def infer_dtype(self, inputs):
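
The `infer_shape` logic in this hunk enforces that every input shape matches the first one and returns `(inputs[0], [1])`. Below is a standalone sketch of that rule for readability; the helper name `check_same_shapes` is hypothetical and not part of the MindSpore API.

```python
# Standalone sketch of the shape-consistency rule in Merge.infer_shape above.
# `check_same_shapes` is a hypothetical helper, not MindSpore API.
def check_same_shapes(op_name, shapes):
    if len(shapes) <= 0:
        raise ValueError(f"For '{op_name}', inputs len must be greater than 0.")
    first = shapes[0]
    for i in range(1, len(shapes)):
        if shapes[i] != first:
            raise ValueError(f"For '{op_name}', the shape of {i}th input should be same as "
                             f"first input {first}, but got {shapes[i]}.")
    # The output pairs the element shape with an index tensor of shape [1].
    return (first, [1])


print(check_same_shapes("Merge", [[2, 3], [2, 3]]))  # ([2, 3], [1])
```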
