diff --git a/example/alexnet_cifar10/README.md b/model_zoo/alexnet/README.md
similarity index 100%
rename from example/alexnet_cifar10/README.md
rename to model_zoo/alexnet/README.md
diff --git a/mindspore/model_zoo/alexnet.py b/model_zoo/alexnet/alexnet.py
similarity index 95%
rename from mindspore/model_zoo/alexnet.py
rename to model_zoo/alexnet/alexnet.py
index 7ad1c8e37b..c528ae39e9 100644
--- a/mindspore/model_zoo/alexnet.py
+++ b/model_zoo/alexnet/alexnet.py
@@ -36,10 +36,9 @@ class AlexNet(nn.Cell):
     """
     Alexnet
     """
-    def __init__(self, num_classes=10):
+    def __init__(self, num_classes=10, channel=3):
         super(AlexNet, self).__init__()
-        self.batch_size = 32
-        self.conv1 = conv(3, 96, 11, stride=4)
+        self.conv1 = conv(channel, 96, 11, stride=4)
         self.conv2 = conv(96, 256, 5, pad_mode="same")
         self.conv3 = conv(256, 384, 3, pad_mode="same")
         self.conv4 = conv(384, 384, 3, pad_mode="same")
diff --git a/example/alexnet_cifar10/config.py b/model_zoo/alexnet/config.py
similarity index 100%
rename from example/alexnet_cifar10/config.py
rename to model_zoo/alexnet/config.py
diff --git a/example/alexnet_cifar10/dataset.py b/model_zoo/alexnet/dataset.py
similarity index 96%
rename from example/alexnet_cifar10/dataset.py
rename to model_zoo/alexnet/dataset.py
index d62ed2852d..fe18225791 100644
--- a/example/alexnet_cifar10/dataset.py
+++ b/model_zoo/alexnet/dataset.py
@@ -23,7 +23,7 @@ import mindspore.dataset.transforms.vision.c_transforms as CV
 from mindspore.common import dtype as mstype


-def create_dataset(data_path, batch_size=32, repeat_size=1, status="train"):
+def create_dataset_mnist(data_path, batch_size=32, repeat_size=1, status="train"):
     """
     create dataset for train or test
     """
diff --git a/example/alexnet_cifar10/eval.py b/model_zoo/alexnet/eval.py
similarity index 91%
rename from example/alexnet_cifar10/eval.py
rename to model_zoo/alexnet/eval.py
index 503af7cf75..c59284e05f 100644
--- a/example/alexnet_cifar10/eval.py
+++ b/model_zoo/alexnet/eval.py
@@ -20,10 +20,10 @@ python eval.py --data_path /YourDataPath --ckpt_path Your.ckpt
 import argparse

 from config import alexnet_cfg as cfg
-from dataset import create_dataset
+from dataset import create_dataset_mnist
+from alexnet import AlexNet
 import mindspore.nn as nn
 from mindspore import context
-from mindspore.model_zoo.alexnet import AlexNet
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
 from mindspore.train import Model
 from mindspore.nn.metrics import Accuracy
@@ -50,9 +50,8 @@ if __name__ == "__main__":
     print("============== Starting Testing ==============")
     param_dict = load_checkpoint(args.ckpt_path)
     load_param_into_net(network, param_dict)
-    ds_eval = create_dataset(args.data_path,
-                             cfg.batch_size,
-                             1,
-                             "test")
+    ds_eval = create_dataset_mnist(args.data_path,
+                                   cfg.batch_size,
+                                   status="test")
     acc = model.eval(ds_eval, dataset_sink_mode=args.dataset_sink_mode)
     print("============== Accuracy:{} ==============".format(acc))
diff --git a/example/alexnet_cifar10/generator_lr.py b/model_zoo/alexnet/generator_lr.py
similarity index 100%
rename from example/alexnet_cifar10/generator_lr.py
rename to model_zoo/alexnet/generator_lr.py
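
The AlexNet-side changes above do three things: the network definition moves out of the `mindspore.model_zoo` package into a self-contained `model_zoo/alexnet/` directory, the dataset helper is renamed to `create_dataset_mnist`, and the hard-coded input channel count (plus the unused `self.batch_size` attribute) is replaced by a `channel` constructor argument. A minimal usage sketch under the new names; the data path and values here are placeholders, not part of the diff:

```python
# Hypothetical caller, run from inside model_zoo/alexnet/ where the
# relocated modules are importable as plain siblings.
from alexnet import AlexNet
from dataset import create_dataset_mnist

net = AlexNet(num_classes=10, channel=3)    # channel is now configurable; it was fixed at 3
ds = create_dataset_mnist("/YourDataPath",  # renamed from create_dataset
                          batch_size=32,
                          status="train")
```
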
diff --git a/example/alexnet_cifar10/train.py b/model_zoo/alexnet/train.py
similarity index 92%
rename from example/alexnet_cifar10/train.py
rename to model_zoo/alexnet/train.py
index 4a3cfdd89f..2ebadec89f 100644
--- a/example/alexnet_cifar10/train.py
+++ b/model_zoo/alexnet/train.py
@@ -20,14 +20,14 @@ python train.py --data_path /YourDataPath
 import argparse

 from config import alexnet_cfg as cfg
-from dataset import create_dataset
+from dataset import create_dataset_mnist
 from generator_lr import get_lr
+from alexnet import AlexNet
 import mindspore.nn as nn
 from mindspore import context
 from mindspore import Tensor
 from mindspore.train import Model
 from mindspore.nn.metrics import Accuracy
-from mindspore.model_zoo.alexnet import AlexNet
 from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor


@@ -50,9 +50,9 @@ if __name__ == "__main__":
     model = Model(network, loss, opt, metrics={"Accuracy": Accuracy()})  # test

     print("============== Starting Training ==============")
-    ds_train = create_dataset(args.data_path,
-                              cfg.batch_size,
-                              cfg.epoch_size)
+    ds_train = create_dataset_mnist(args.data_path,
+                                    cfg.batch_size,
+                                    cfg.epoch_size)
     time_cb = TimeMonitor(data_size=ds_train.get_dataset_size())
     config_ck = CheckpointConfig(save_checkpoint_steps=cfg.save_checkpoint_steps,
                                  keep_checkpoint_max=cfg.keep_checkpoint_max)
diff --git a/example/lenet_mnist/README.md b/model_zoo/lenet/README.md
similarity index 100%
rename from example/lenet_mnist/README.md
rename to model_zoo/lenet/README.md
diff --git a/example/lenet_mnist/config.py b/model_zoo/lenet/config.py
similarity index 100%
rename from example/lenet_mnist/config.py
rename to model_zoo/lenet/config.py
diff --git a/example/lenet_mnist/dataset.py b/model_zoo/lenet/dataset.py
similarity index 100%
rename from example/lenet_mnist/dataset.py
rename to model_zoo/lenet/dataset.py
diff --git a/example/lenet_mnist/eval.py b/model_zoo/lenet/eval.py
similarity index 98%
rename from example/lenet_mnist/eval.py
rename to model_zoo/lenet/eval.py
index 8317785a66..ee1f794695 100644
--- a/example/lenet_mnist/eval.py
+++ b/model_zoo/lenet/eval.py
@@ -22,8 +22,8 @@ import os
 import argparse
 from dataset import create_dataset
 from config import mnist_cfg as cfg
+from lenet import LeNet5
 import mindspore.nn as nn
-from mindspore.model_zoo.lenet import LeNet5
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
 from mindspore.train.callback import ModelCheckpoint, CheckpointConfig
diff --git a/mindspore/model_zoo/lenet.py b/model_zoo/lenet/lenet.py
similarity index 95%
rename from mindspore/model_zoo/lenet.py
rename to model_zoo/lenet/lenet.py
index 6e39c439bf..3864315dba 100644
--- a/mindspore/model_zoo/lenet.py
+++ b/model_zoo/lenet/lenet.py
@@ -50,11 +50,10 @@ class LeNet5(nn.Cell):
         >>> LeNet(num_class=10)
     """

-    def __init__(self, num_class=10):
+    def __init__(self, num_class=10, channel=1):
         super(LeNet5, self).__init__()
         self.num_class = num_class
-        self.batch_size = 32
-        self.conv1 = conv(1, 6, 5)
+        self.conv1 = conv(channel, 6, 5)
         self.conv2 = conv(6, 16, 5)
         self.fc1 = fc_with_initialize(16 * 5 * 5, 120)
         self.fc2 = fc_with_initialize(120, 84)
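
As with AlexNet, `LeNet5` drops the dead `self.batch_size` attribute and gains a `channel` argument. Dropping the attribute is safe because the cell flattens with `nn.Flatten()`, which collapses all non-batch dimensions at run time, so one cell serves any batch size. A small sketch; the 1x32x32 input shape is an assumption that yields the `16 * 5 * 5` features `fc1` expects:

```python
import numpy as np
from mindspore import Tensor
from lenet import LeNet5  # the relocated model_zoo/lenet/lenet.py

net = LeNet5(num_class=10, channel=1)
out_a = net(Tensor(np.ones((8, 1, 32, 32), np.float32)))  # batch of 8
out_b = net(Tensor(np.ones((2, 1, 32, 32), np.float32)))  # batch of 2, same cell
```
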
diff --git a/example/lenet_mnist/train.py b/model_zoo/lenet/train.py
similarity index 96%
rename from example/lenet_mnist/train.py
rename to model_zoo/lenet/train.py
index 3186f5fca7..2c0022be8c 100644
--- a/example/lenet_mnist/train.py
+++ b/model_zoo/lenet/train.py
@@ -22,8 +22,8 @@ import os
 import argparse
 from config import mnist_cfg as cfg
 from dataset import create_dataset
+from lenet import LeNet5
 import mindspore.nn as nn
-from mindspore.model_zoo.lenet import LeNet5
 from mindspore import context
 from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor
 from mindspore.train import Model
@@ -36,7 +36,7 @@ if __name__ == "__main__":
                         help='device where the code will be implemented (default: Ascend)')
     parser.add_argument('--data_path', type=str, default="./MNIST_Data",
                         help='path where the dataset is saved')
-    parser.add_argument('--dataset_sink_mode', type=bool, default=False, help='dataset_sink_mode is False or True')
+    parser.add_argument('--dataset_sink_mode', type=bool, default=True, help='dataset_sink_mode is False or True')
     args = parser.parse_args()
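
The only behavioral change in `model_zoo/lenet/train.py` is the `--dataset_sink_mode` default flipping from `False` to `True`. One caveat worth knowing about the flag as written (standard argparse behavior, not something this diff introduces): `type=bool` does not parse the string `"False"`, because any non-empty string is truthy, so `--dataset_sink_mode False` still yields `True`. A sketch of the pitfall and a common workaround; the `str2bool` helper is hypothetical, not part of the diff:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--dataset_sink_mode', type=bool, default=True)
args = parser.parse_args(['--dataset_sink_mode', 'False'])
print(args.dataset_sink_mode)  # True -- bool('False') is truthy

def str2bool(v):
    """Explicit string-to-bool conversion suitable for argparse's type=."""
    return str(v).lower() in ('1', 'true', 'yes')
```
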
diff --git a/tests/perf_test/lenet.py b/tests/perf_test/lenet.py
new file mode 100644
index 0000000000..3864315dba
--- /dev/null
+++ b/tests/perf_test/lenet.py
@@ -0,0 +1,78 @@
+# Copyright 2020 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+"""LeNet."""
+import mindspore.nn as nn
+from mindspore.common.initializer import TruncatedNormal
+
+
+def conv(in_channels, out_channels, kernel_size, stride=1, padding=0):
+    """weight initial for conv layer"""
+    weight = weight_variable()
+    return nn.Conv2d(in_channels, out_channels,
+                     kernel_size=kernel_size, stride=stride, padding=padding,
+                     weight_init=weight, has_bias=False, pad_mode="valid")
+
+
+def fc_with_initialize(input_channels, out_channels):
+    """weight initial for fc layer"""
+    weight = weight_variable()
+    bias = weight_variable()
+    return nn.Dense(input_channels, out_channels, weight, bias)
+
+
+def weight_variable():
+    """weight initial"""
+    return TruncatedNormal(0.02)
+
+
+class LeNet5(nn.Cell):
+    """
+    Lenet network
+
+    Args:
+        num_class (int): Num classes. Default: 10.
+
+    Returns:
+        Tensor, output tensor
+    Examples:
+        >>> LeNet(num_class=10)
+
+    """
+    def __init__(self, num_class=10, channel=1):
+        super(LeNet5, self).__init__()
+        self.num_class = num_class
+        self.conv1 = conv(channel, 6, 5)
+        self.conv2 = conv(6, 16, 5)
+        self.fc1 = fc_with_initialize(16 * 5 * 5, 120)
+        self.fc2 = fc_with_initialize(120, 84)
+        self.fc3 = fc_with_initialize(84, self.num_class)
+        self.relu = nn.ReLU()
+        self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
+        self.flatten = nn.Flatten()
+
+    def construct(self, x):
+        x = self.conv1(x)
+        x = self.relu(x)
+        x = self.max_pool2d(x)
+        x = self.conv2(x)
+        x = self.relu(x)
+        x = self.max_pool2d(x)
+        x = self.flatten(x)
+        x = self.fc1(x)
+        x = self.relu(x)
+        x = self.fc2(x)
+        x = self.relu(x)
+        x = self.fc3(x)
+        return x
diff --git a/tests/perf_test/test_lenet.py b/tests/perf_test/test_lenet.py
index ef526e1fc2..72a6552f53 100644
--- a/tests/perf_test/test_lenet.py
+++ b/tests/perf_test/test_lenet.py
@@ -17,12 +17,12 @@
 import numpy as np

+from lenet import LeNet5
 import mindspore.nn as nn
 import mindspore.ops.composite as C
 from mindspore import Tensor
 from mindspore import context
 from mindspore.common.api import _executor
-from mindspore.model_zoo.lenet import LeNet

 context.set_context(mode=context.GRAPH_MODE)
@@ -61,7 +61,7 @@ def test_compile():

 def test_compile_grad():
     """Compile forward and backward graph"""
-    net = LeNet(num_class=num_class)
+    net = LeNet5(num_class=num_class)
     inp = Tensor(np.array(np.random.randn(batch_size,
                                           channel,
                                           height,
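
`tests/perf_test` now carries its own copy of `LeNet5` (byte-identical to `model_zoo/lenet/lenet.py`, per the shared blob hash 3864315dba), so the perf test no longer depends on the removed `mindspore.model_zoo` package; `test_compile_grad` is also fixed to construct `LeNet5` instead of the old `LeNet`. A rough sketch of the forward-compile path the test exercises; the concrete constants (`batch_size`, `channel`, `height`, `width`) are assumptions, since the truncated hunk does not show them:

```python
import numpy as np
from mindspore import Tensor, context
from mindspore.common.api import _executor  # internal API the test already uses
from lenet import LeNet5  # local copy under tests/perf_test/

context.set_context(mode=context.GRAPH_MODE)
net = LeNet5(num_class=10, channel=1)
inp = Tensor(np.random.randn(32, 1, 32, 32).astype(np.float32))
_executor.compile(net, inp)  # builds the forward graph without running it
```
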
diff --git a/tests/st/networks/models/lenet.py b/tests/st/networks/models/lenet.py
deleted file mode 100644
index ce0932ca21..0000000000
--- a/tests/st/networks/models/lenet.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2019 Huawei Technologies Co., Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============================================================================
-import mindspore.nn as nn
-from mindspore.ops import operations as P
-
-
-class LeNet(nn.Cell):
-    def __init__(self):
-        super(LeNet, self).__init__()
-        self.relu = P.ReLU()
-        self.batch_size = 32
-
-        self.conv1 = nn.Conv2d(1, 6, kernel_size=5, stride=1, padding=0, has_bias=False, pad_mode='valid')
-        self.conv2 = nn.Conv2d(6, 16, kernel_size=5, stride=1, padding=0, has_bias=False, pad_mode='valid')
-        self.pool = nn.MaxPool2d(kernel_size=2, stride=2)
-        self.reshape = P.Reshape()
-        self.fc1 = nn.Dense(400, 120)
-        self.fc2 = nn.Dense(120, 84)
-        self.fc3 = nn.Dense(84, 10)
-
-    def construct(self, input_x):
-        output = self.conv1(input_x)
-        output = self.relu(output)
-        output = self.pool(output)
-        output = self.conv2(output)
-        output = self.relu(output)
-        output = self.pool(output)
-        output = self.reshape(output, (self.batch_size, -1))
-        output = self.fc1(output)
-        output = self.relu(output)
-        output = self.fc2(output)
-        output = self.relu(output)
-        output = self.fc3(output)
-        return output
diff --git a/tests/st/networks/test_gpu_lenet.py b/tests/st/networks/test_gpu_lenet.py
index 87978011ab..038af92223 100644
--- a/tests/st/networks/test_gpu_lenet.py
+++ b/tests/st/networks/test_gpu_lenet.py
@@ -26,17 +26,66 @@ import mindspore.nn as nn
 from mindspore import Tensor
 from mindspore.common import dtype as mstype
 from mindspore.dataset.transforms.vision import Inter
-from mindspore.model_zoo.lenet import LeNet5
 from mindspore.nn import Dense, TrainOneStepCell, WithLossCell
 from mindspore.nn.metrics import Accuracy
 from mindspore.nn.optim import Momentum
 from mindspore.ops import operations as P
 from mindspore.train import Model
 from mindspore.train.callback import LossMonitor
+from mindspore.common.initializer import TruncatedNormal

 context.set_context(mode=context.GRAPH_MODE, device_target="GPU")


+def conv(in_channels, out_channels, kernel_size, stride=1, padding=0):
+    """weight initial for conv layer"""
+    weight = weight_variable()
+    return nn.Conv2d(in_channels, out_channels,
+                     kernel_size=kernel_size, stride=stride, padding=padding,
+                     weight_init=weight, has_bias=False, pad_mode="valid")
+
+
+def fc_with_initialize(input_channels, out_channels):
+    """weight initial for fc layer"""
+    weight = weight_variable()
+    bias = weight_variable()
+    return nn.Dense(input_channels, out_channels, weight, bias)
+
+
+def weight_variable():
+    """weight initial"""
+    return TruncatedNormal(0.02)
+
+
+class LeNet5(nn.Cell):
+    def __init__(self, num_class=10, channel=1):
+        super(LeNet5, self).__init__()
+        self.num_class = num_class
+        self.conv1 = conv(channel, 6, 5)
+        self.conv2 = conv(6, 16, 5)
+        self.fc1 = fc_with_initialize(16 * 5 * 5, 120)
+        self.fc2 = fc_with_initialize(120, 84)
+        self.fc3 = fc_with_initialize(84, self.num_class)
+        self.relu = nn.ReLU()
+        self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
+        self.flatten = nn.Flatten()
+
+    def construct(self, x):
+        x = self.conv1(x)
+        x = self.relu(x)
+        x = self.max_pool2d(x)
+        x = self.conv2(x)
+        x = self.relu(x)
+        x = self.max_pool2d(x)
+        x = self.flatten(x)
+        x = self.fc1(x)
+        x = self.relu(x)
+        x = self.fc2(x)
+        x = self.relu(x)
+        x = self.fc3(x)
+        return x
+
+
 class LeNet(nn.Cell):
     def __init__(self):
         super(LeNet, self).__init__()
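
The GPU system test follows the same pattern: rather than importing `LeNet5` from `mindspore.model_zoo`, it inlines an identical definition next to the existing hand-rolled `LeNet`, keeping the test self-contained after the package removal. Given the imports the test retains (`WithLossCell`, `TrainOneStepCell`, `Momentum`), the training step it drives looks roughly like the sketch below; the loss and optimizer settings are assumptions based on the MindSpore API of this era, not values shown in this diff:

```python
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.nn import TrainOneStepCell, WithLossCell
from mindspore.nn.optim import Momentum

net = LeNet5(num_class=10, channel=1)  # the class inlined above
loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
opt = Momentum(net.trainable_params(), learning_rate=0.1, momentum=0.9)
train_net = TrainOneStepCell(WithLossCell(net, loss), opt)
train_net.set_train()

data = Tensor(np.random.randn(32, 1, 32, 32).astype(np.float32))
label = Tensor(np.random.randint(0, 10, (32,)).astype(np.int32))
print(train_net(data, label))  # one optimizer step, returns the loss
```
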