diff --git a/tests/st/networks/test_cpu_lenet.py b/tests/st/networks/test_cpu_lenet.py
index 9fd50f5d9b..bdcbc32382 100644
--- a/tests/st/networks/test_cpu_lenet.py
+++ b/tests/st/networks/test_cpu_lenet.py
@@ -78,4 +78,4 @@ def test_lenet():
     data = Tensor(np.ones([32, 1, 32, 32]).astype(np.float32) * 0.01)
     label = Tensor(np.ones([32]).astype(np.int32))
     net = LeNet()
-    train(net, data, label)
+    train(net, data, label)
\ No newline at end of file
diff --git a/tests/st/networks/test_network_main.py b/tests/st/networks/test_network_main.py
index 7601739f8c..730602c0ae 100644
--- a/tests/st/networks/test_network_main.py
+++ b/tests/st/networks/test_network_main.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # ============================================================================
+<<<<<<< HEAD:tests/st/networks/test_network_main.py
 """
 Function: test network

@@ -31,6 +32,47 @@ from models.lenet import LeNet
 from models.resnetv1_5 import resnet50
 from models.alexnet import AlexNet
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
+=======
+import pytest
+from mindspore.nn import TrainOneStepCell, WithLossCell
+import mindspore.context as context
+from mindspore.nn.optim import Momentum
+import numpy as np
+import mindspore.nn as nn
+from mindspore.ops import operations as P
+from mindspore import Tensor
+
+class LeNet(nn.Cell):
+    def __init__(self):
+        super(LeNet, self).__init__()
+        self.relu = P.ReLU()
+        self.batch_size = 32
+
+        self.conv1 = nn.Conv2d(1, 6, kernel_size=5, stride=1, padding=0, has_bias=False, pad_mode='valid')
+        self.conv2 = nn.Conv2d(6, 16, kernel_size=5, stride=1, padding=0, has_bias=False, pad_mode='valid')
+        self.pool = nn.MaxPool2d(kernel_size=2, stride=2)
+        self.reshape = P.Reshape()
+        self.fc1 = nn.Dense(400, 120)
+        self.fc2 = nn.Dense(120, 84)
+        self.fc3 = nn.Dense(84, 10)
+
+    def construct(self, input_x):
+        output = self.conv1(input_x)
+        output = self.relu(output)
+        output = self.pool(output)
+        output = self.conv2(output)
+        output = self.relu(output)
+        output = self.pool(output)
+        output = self.reshape(output, (self.batch_size, -1))
+        output = self.fc1(output)
+        output = self.relu(output)
+        output = self.fc2(output)
+        output = self.relu(output)
+        output = self.fc3(output)
+        return output
+
+context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
+>>>>>>> add cpu st lenet:tests/st/networks/test_cpu_lenet.py

 def train(net, data, label):
     learning_rate = 0.01
@@ -47,17 +89,24 @@ def train(net, data, label):
     print("+++++++++++++++++++++++++++")
     assert res

+<<<<<<< HEAD:tests/st/networks/test_network_main.py
 def test_resnet50():
     data = Tensor(np.ones([32, 3 ,224, 224]).astype(np.float32) * 0.01)
     label = Tensor(np.ones([32]).astype(np.int32))
     net = resnet50(32, 10)
     train(net, data, label)
+=======
+@pytest.mark.level0
+@pytest.mark.platform_x86_cpu
+@pytest.mark.env_onecard
+>>>>>>> add cpu st lenet:tests/st/networks/test_cpu_lenet.py
 def test_lenet():
     data = Tensor(np.ones([32, 1 ,32, 32]).astype(np.float32) * 0.01)
     label = Tensor(np.ones([32]).astype(np.int32))
     net = LeNet()
     train(net, data, label)

+<<<<<<< HEAD:tests/st/networks/test_network_main.py

 def test_alexnet():
     data = Tensor(np.ones([32, 3 ,227, 227]).astype(np.float32) * 0.01)
@@ -79,3 +128,5 @@ if __name__ == "__main__":
         test_alexnet()
     else:
         print("Please add net name like --net lenet")
+=======
+>>>>>>> add cpu st lenet:tests/st/networks/test_cpu_lenet.py