diff --git a/mindspore/nn/layer/basic.py b/mindspore/nn/layer/basic.py
index b5bc2cbb99..68b90ed77b 100644
--- a/mindspore/nn/layer/basic.py
+++ b/mindspore/nn/layer/basic.py
@@ -188,10 +188,10 @@ class Dense(Cell):
         ValueError: If weight_init or bias_init shape is incorrect.
 
     Inputs:
-        - **input** (Tensor) - Tensor of shape :math:`(N, in\_channels)`.
+        - **input** (Tensor) - Tensor of shape :math:`(*, in\_channels)`.
 
     Outputs:
-        Tensor of shape :math:`(N, out\_channels)`.
+        Tensor of shape :math:`(*, out\_channels)`.
 
     Examples:
         >>> input = Tensor(np.random.randint(0, 255, [2, 3]), mindspore.float32)
@@ -200,7 +200,7 @@ class Dense(Cell):
         [[ 2.5246444 2.2738023 0.5711005 -3.9399147 ]
          [ 1.0739875 4.0155234 0.94188046 -5.459526 ]]
     """
-    @cell_attr_register(attrs=['has_bias', 'activation'])
+    @cell_attr_register(attrs=['has_bias', 'activation', 'in_channels', 'out_channels'])
     def __init__(self,
                  in_channels,
                  out_channels,
diff --git a/tests/st/ops/ascend/test_dense.py b/tests/st/ops/ascend/test_dense.py
index 65b1eb400f..acb90b362e 100644
--- a/tests/st/ops/ascend/test_dense.py
+++ b/tests/st/ops/ascend/test_dense.py
@@ -31,6 +31,18 @@ class Net(nn.Cell):
     def construct(self, x):
         return self.dense(x)
 
+class MultiLayerDense(nn.Cell):
+    def __init__(self):
+        super(MultiLayerDense, self).__init__()
+        self.dense1 = nn.Dense(in_channels=256, out_channels=512)
+        self.dense2 = nn.Dense(in_channels=512, out_channels=1024)
+
+    @ms_function
+    def construct(self, x):
+        x = self.dense1(x)
+        x = self.dense2(x)
+        return x
+
 
 def test_net():
     x = np.random.randn(32, 2048).astype(np.float32)
@@ -46,3 +58,11 @@ def test_net_ND():
     output = net(Tensor(x))
     print(x)
     print(output.asnumpy())
+
+
+def test_net_multilayer():
+    x = np.random.randn(16, 32, 256).astype(np.float32)
+    net = MultiLayerDense()
+    output = net(Tensor(x))
+    print(x)
+    print(output.asnumpy())
diff --git a/tests/st/ops/gpu/test_dense.py b/tests/st/ops/gpu/test_dense.py
new file mode 100644
index 0000000000..0b69869266
--- /dev/null
+++ b/tests/st/ops/gpu/test_dense.py
@@ -0,0 +1,65 @@
+# Copyright 2019 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+import numpy as np
+
+import mindspore.context as context
+import mindspore.nn as nn
+from mindspore import Tensor
+
+context.set_context(device_target="GPU")
+
+
+class Net(nn.Cell):
+    def __init__(self):
+        super(Net, self).__init__()
+        self.dense = nn.Dense(2048, 1001)
+
+    def construct(self, x):
+        return self.dense(x)
+
+class MultiLayerDense(nn.Cell):
+    def __init__(self):
+        super(MultiLayerDense, self).__init__()
+        self.dense1 = nn.Dense(in_channels=256, out_channels=512)
+        self.dense2 = nn.Dense(in_channels=512, out_channels=1024)
+
+    def construct(self, x):
+        x = self.dense1(x)
+        x = self.dense2(x)
+        return x
+
+
+def test_net():
+    x = np.random.randn(32, 2048).astype(np.float32)
+    net = Net()
+    output = net(Tensor(x))
+    print(x)
+    print(output.asnumpy())
+
+
+def test_net_ND():
+    x = np.random.randn(2, 332, 2048).astype(np.float32)
+    net = Net()
+    output = net(Tensor(x))
+    print(x)
+    print(output.asnumpy())
+
+
+def test_net_multilayer():
+    x = np.random.randn(16, 32, 256).astype(np.float32)
+    net = MultiLayerDense()
+    output = net(Tensor(x))
+    print(x)
+    print(output.asnumpy())
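
Usage sketch (not part of the patch): the docstring change above advertises inputs of shape (*, in_channels), and the new tests feed 3-D tensors through nn.Dense. A minimal, hedged example of what that enables, assuming a configured MindSpore backend (GPU or Ascend):

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor

    # Same layer size as the first layer of the MultiLayerDense test above.
    dense = nn.Dense(in_channels=256, out_channels=512)

    # A 3-D input: only the last axis must match in_channels.
    x = Tensor(np.random.randn(16, 32, 256).astype(np.float32))
    y = dense(x)

    # Leading dimensions are preserved; only the channel axis changes,
    # per the updated docstring: (*, in_channels) -> (*, out_channels).
    print(y.shape)  # expected: (16, 32, 512)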