diff --git a/mindspore/nn/layer/activation.py b/mindspore/nn/layer/activation.py
index b3ebe3af1b..3a754e4c03 100644
--- a/mindspore/nn/layer/activation.py
+++ b/mindspore/nn/layer/activation.py
@@ -303,7 +303,7 @@ class GELU(Cell):
         of standard Gaussian distribution and :math:`x_i` is the element of the input.
 
     Inputs:
-        - **input_data** (Tensor) - The input of Tanh.
+        - **input_data** (Tensor) - The input of GELU.
 
     Outputs:
         Tensor, with the same type and shape as the `input_data`.
diff --git a/mindspore/ops/operations/__init__.py b/mindspore/ops/operations/__init__.py
index 379fd5bca5..34650d22d5 100644
--- a/mindspore/ops/operations/__init__.py
+++ b/mindspore/ops/operations/__init__.py
@@ -234,6 +234,7 @@ __all__ = [
     'ReduceProd',
     'CumProd',
     'Log',
+    'Log1p',
     'SigmoidCrossEntropyWithLogits',
     'FloorDiv',
     'FloorMod',
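
With `Log1p` added to `__all__`, the operator becomes importable from `mindspore.ops.operations`. A minimal usage sketch, assuming `Log1p` follows the standard instantiate-then-call pattern of the other exported primitives (the input values and expected output here are illustrative, not taken from this diff):

    import numpy as np
    from mindspore import Tensor
    from mindspore.ops import operations as P

    # Log1p computes the elementwise natural logarithm of (1 + x);
    # it is assumed to behave like the Log primitive exported alongside it.
    log1p = P.Log1p()
    x = Tensor(np.array([0.0, 1.0, 2.0], np.float32))
    output = log1p(x)  # approximately [0.0, 0.6931, 1.0986]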