pylint warning clean

pull/1042/head
jinyaohui 5 years ago
parent a2d5ad5abe
commit 26fd75895d

@@ -18,13 +18,13 @@
# pylint: disable=missing-docstring, arguments-differ, W0612
import os
import mindspore.common.dtype as mstype
import mindspore.context as context
from mindspore import Tensor
-from mindspore.nn.optim import AdamWeightDecayDynamicLR
from mindspore.model_zoo.Bert_NEZHA import BertConfig, BertNetworkWithLoss, BertTrainOneStepCell, \
    BertTrainOneStepWithLossScaleCell
-from mindspore.nn.wrap.loss_scale import FixedLossScaleUpdateCell
+from mindspore.nn.optim import AdamWeightDecayDynamicLR
from mindspore.train.loss_scale_manager import DynamicLossScaleManager
from ...dataset_mock import MindData
from ...ops_common import nn, np, batch_tuple_tensor, build_construct_graph

@@ -13,7 +13,6 @@
# limitations under the License.
# ============================================================================
"""use ImageNetToMR tool generate mindrecord"""
-import os
from mindspore.mindrecord import ImageNetToMR

IMAGENET_MAP_FILE = "../../../ut/data/mindrecord/testImageNetDataWhole/labels_map.txt"
@@ -21,6 +20,7 @@ IMAGENET_IMAGE_DIR = "../../../ut/data/mindrecord/testImageNetDataWhole/images"
MINDRECORD_FILE = "./imagenet.mindrecord"
PARTITION_NUMBER = 16

+
def imagenet_to_mindrecord():
    imagenet_transformer = ImageNetToMR(IMAGENET_MAP_FILE,
                                        IMAGENET_IMAGE_DIR,
@@ -28,5 +28,6 @@ def imagenet_to_mindrecord():
                                        PARTITION_NUMBER)
    imagenet_transformer.transform()

+
if __name__ == '__main__':
    imagenet_to_mindrecord()
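
The dropped `import os` is the usual fix for pylint's unused-import warning (W0611): the module is imported but never referenced anywhere in this script. When a seemingly unused import has to stay (for instance, one kept for its side effects), the conventional alternative is an inline disable; a small hedged sketch, not taken from the diff:

# Kept only for illustration; the inline pragma silences the warning on this line.
import os  # pylint: disable=unused-import

# Without that pragma, pylint would report something like:
#   W0611: Unused import os (unused-import)
print("sketch only; the commit simply deletes the unused import instead")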

@@ -15,6 +15,7 @@
"""generate tfrecord"""
import collections
import os
import tensorflow as tf

+
IMAGENET_MAP_FILE = "../../../ut/data/mindrecord/testImageNetDataWhole/labels_map.txt"
@@ -22,6 +23,7 @@ IMAGENET_IMAGE_DIR = "../../../ut/data/mindrecord/testImageNetDataWhole/images"
TFRECORD_FILE = "./imagenet.tfrecord"
PARTITION_NUMBER = 16

+
def get_imagenet_filename_label_pic(map_file, image_dir):
    """
    Get data from imagenet.
@@ -69,18 +71,22 @@ def get_imagenet_filename_label_pic(map_file, image_dir):
            continue
        yield str(file_name), int(label), image_bytes

+
def create_int_feature(values):
    feature = tf.train.Feature(int64_list=tf.train.Int64List(value=[values]))
    return feature

+
def create_string_feature(values):
    feature = tf.train.Feature(bytes_list=tf.train.BytesList(value=[bytes(values, encoding='utf-8')]))
    return feature

+
def create_bytes_feature(values):
    feature = tf.train.Feature(bytes_list=tf.train.BytesList(value=[values]))
    return feature

+
def imagenet_to_tfrecord():
    writers = []
    for i in range(PARTITION_NUMBER):
@@ -109,5 +115,6 @@ def imagenet_to_tfrecord():
    print("Write {} total examples".format(total_written))

+
if __name__ == '__main__':
    imagenet_to_tfrecord()

@@ -14,17 +14,20 @@
# ============================================================================
"""test dataset performance about mindspore.MindDataset, mindspore.TFRecordDataset, tf.data.TFRecordDataset"""
import time
-import mindspore.dataset as ds
-from mindspore.mindrecord import FileReader
import tensorflow as tf
+
+import mindspore.dataset as ds
+from mindspore.mindrecord import FileReader

print_step = 5000

+
def print_log(count):
    if count % print_step == 0:
        print("Read {} rows ...".format(count))

+
def use_filereader(mindrecord):
    start = time.time()
    columns_list = ["data", "label"]
@@ -38,6 +41,7 @@ def use_filereader(mindrecord):
    end = time.time()
    print("Read by FileReader - total rows: {}, cost time: {}s".format(num_iter, end - start))

+
def use_minddataset(mindrecord):
    start = time.time()
    columns_list = ["data", "label"]
@@ -51,6 +55,7 @@ def use_minddataset(mindrecord):
    end = time.time()
    print("Read by MindDataset - total rows: {}, cost time: {}s".format(num_iter, end - start))

+
def use_tfrecorddataset(tfrecord):
    start = time.time()
    columns_list = ["data", "label"]
@@ -66,8 +71,10 @@ def use_tfrecorddataset(tfrecord):
    end = time.time()
    print("Read by TFRecordDataset - total rows: {}, cost time: {}s".format(num_iter, end - start))

+
def use_tensorflow_tfrecorddataset(tfrecord):
    start = time.time()
+
    def _parse_record(example_photo):
        features = {
            'file_name': tf.io.FixedLenFeature([], tf.string),
@@ -87,6 +94,7 @@ def use_tensorflow_tfrecorddataset(tfrecord):
    end = time.time()
    print("Read by TensorFlow TFRecordDataset - total rows: {}, cost time: {}s".format(num_iter, end - start))

+
if __name__ == '__main__':
    # use MindDataset
    mindrecord = './imagenet.mindrecord00'
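
The import moves in this file (and in the BERT perf test above) follow the usual pylint/PEP 8 grouping that the wrong-import-order check (C0411) expects: standard-library imports first, then external third-party packages, then the project's own packages and relative imports. Read back from the hunk above, the cleaned-up header of this file ends up roughly as below; the blank lines between groups are a common convention rather than something pylint enforces:

import time                                    # standard library

import tensorflow as tf                        # external third-party package

import mindspore.dataset as ds                 # the project's own package last
from mindspore.mindrecord import FileReader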

@@ -18,15 +18,14 @@
import numpy as np
import mindspore.nn as nn
-from mindspore.common.api import _executor
+import mindspore.ops.composite as C
from mindspore import Tensor
-from mindspore.model_zoo.lenet import LeNet
from mindspore import context
-import mindspore.ops.composite as C
+from mindspore.common.api import _executor
+from mindspore.model_zoo.lenet import LeNet
context.set_context(mode=context.GRAPH_MODE)
batch_size = 1
channel = 1
height = 32
@@ -36,6 +35,7 @@ num_class = 10
class LeNetGrad(nn.Cell):
    """Backward of LeNet"""
+
    def __init__(self, network):
        super(LeNetGrad, self).__init__()
        self.grad_op = C.grad_all_with_sens

@@ -17,10 +17,11 @@
import numpy as np
-from mindspore.common.api import _executor
from mindspore import Tensor
+from mindspore.common.api import _executor
from .resnet_example import resnet50

+
def test_compile():
    net = resnet50()
    inp = Tensor(np.ones([1, 3, 224, 224]).astype(np.float32))

@@ -20,9 +20,9 @@
import numpy as np
from mindspore import Tensor
-from ..train_step_wrap import train_step_without_opt
from .resnet_example import resnet50
-from ..vm_impl import *
+from ..train_step_wrap import train_step_without_opt

+
def test_resnet50_pynative():
    net = train_step_without_opt(resnet50())
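
Dropping `from ..vm_impl import *` addresses pylint's wildcard-import warning (W0401): a star import hides where names come from and drags unused symbols into the module (reported separately as W0614). A tiny sketch of the preferred style, using `math` as a stand-in since the test's `vm_impl` package is not shown here:

# Flagged (W0401): every public name in the module lands in this namespace.
# from math import *

# Preferred: name exactly what is used, so pylint and readers can track it.
from math import pi, sqrt

print(sqrt(2) * pi)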

@@ -17,13 +17,15 @@
import numpy as np
-from mindspore.common.api import _executor
import mindspore.context as context
from mindspore import Tensor
-from ..train_step_wrap import train_step_with_loss_warp
+from mindspore.common.api import _executor
from .resnet_example import resnet50
+from ..train_step_wrap import train_step_with_loss_warp

+
context.set_context(mode=context.GRAPH_MODE)

+
def test_train_step():
    net = train_step_with_loss_warp(resnet50())
    net.set_train()

@@ -16,15 +16,15 @@
train step wrap
"""
import mindspore.nn as nn
-from mindspore.ops import functional as F
+from mindspore import ParameterTuple
from mindspore.ops import composite as C
-from mindspore.ops import operations as P
-from mindspore import Parameter, ParameterTuple

+
class TrainStepWrap(nn.Cell):
    """
    TrainStepWrap definition
    """
+
    def __init__(self, network):
        super(TrainStepWrap, self).__init__()
        self.network = network
@@ -39,10 +39,12 @@ class TrainStepWrap(nn.Cell):
        grads = self.grad(self.network, weights)(x, label)
        return self.optimizer(grads)

+
class NetWithLossClass(nn.Cell):
    """
    NetWithLossClass definition
    """
+
    def __init__(self, network):
        super(NetWithLossClass, self).__init__(auto_prefix=False)
        self.loss = nn.SoftmaxCrossEntropyWithLogits()
@@ -61,6 +63,7 @@ class TrainStepWrap2(nn.Cell):
    """
    TrainStepWrap2 definition
    """
+
    def __init__(self, network, sens):
        super(TrainStepWrap2, self).__init__()
        self.network = network
@@ -76,13 +79,16 @@ class TrainStepWrap2(nn.Cell):
        grads = self.grad(self.network, weights)(x, self.sens)
        return self.optimizer(grads)

+
def train_step_with_sens(network, sens):
    return TrainStepWrap2(network, sens)

+
class TrainStepWrapWithoutOpt(nn.Cell):
    """
    TrainStepWrapWithoutOpt definition
    """
+
    def __init__(self, network):
        super(TrainStepWrapWithoutOpt, self).__init__()
        self.network = network
@@ -93,5 +99,6 @@ class TrainStepWrapWithoutOpt(nn.Cell):
        grads = self.grad(self.network, self.weights)(x, label)
        return grads

+
def train_step_without_opt(network):
    return TrainStepWrapWithoutOpt(NetWithLossClass(network))
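
Most of the hunks in this file only insert blank lines. That is the standard PEP 8 layout (checked by pycodestyle/autopep8 rather than by pylint itself): two blank lines around top-level classes and functions, one blank line between methods, plus the blank line this commit adds between a class docstring and `__init__`. A plain-Python sketch of the spacing the hunks appear to be normalizing toward (illustrative module, not a file from this diff):

"""Spacing sketch (illustrative only)."""


class NetWithLoss:                 # two blank lines above a top-level class
    """Toy wrapper, used only to show the blank-line layout."""

    def __init__(self, network):   # blank line after the class docstring
        self.network = network

    def get(self):                 # one blank line between methods
        return self.network


def wrap(network):                 # two blank lines above a top-level function
    return NetWithLoss(network)


if __name__ == '__main__':
    print(wrap("net").get())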

@@ -28,6 +28,7 @@ context.set_context(mode=context.GRAPH_MODE)
def Xtest_arg_dict():
    class DictNet(Cell):
        """DictNet definition"""
+
        def __init__(self):
            super(DictNet, self).__init__()
            self.max = P.Maximum()
@@ -48,6 +49,7 @@ def Xtest_arg_dict():
def test_const_dict():
    class DictNet(Cell):
        """DictNet1 definition"""
+
        def __init__(self):
            super(DictNet, self).__init__()
            self.max = P.Maximum()
@@ -58,6 +60,7 @@ def test_const_dict():
            a = self.max(self.dictionary["x"], self.dictionary["y"])
            b = self.min(self.dictionary["x"], self.dictionary["y"])
            return a + b

+
    net = DictNet()
    net()
@@ -65,6 +68,7 @@ def test_const_dict():
def test_dict_set_or_get_item():
    class DictNet(Cell):
        """DictNet1 definition"""
+
        def __init__(self):
            super(DictNet, self).__init__()
            self.dict_ = {"x": 1, "y": 2}
@@ -91,6 +95,7 @@ def test_dict_set_or_get_item():
def test_dict_set_or_get_item_2():
    class DictNet(Cell):
        """DictNet1 definition"""
+
        def __init__(self):
            super(DictNet, self).__init__()
@@ -117,6 +122,7 @@ def test_dict_set_or_get_item_2():
def test_dict_set_or_get_item_3():
    class DictNet(Cell):
        """DictNet1 definition"""
+
        def __init__(self):
            super(DictNet, self).__init__()
            self.dict_ = {"x": Tensor(np.ones([2, 2, 3], np.float32)), "y": 1}
@@ -130,5 +136,3 @@ def test_dict_set_or_get_item_3():
    net = DictNet()
    assert net() == Tensor(np.ones([4, 2, 3], np.float32))
-
-

@@ -13,7 +13,6 @@
# limitations under the License.
# ============================================================================
import numpy as np
-import pytest
from mindspore import Tensor, context
from mindspore.nn import Cell

@@ -15,6 +15,7 @@
"""setup for pytest"""
import mindspore.context as context

+
# pylint: disable=unused-argument
def setup_module(module):
    context.set_context(mode=context.GRAPH_MODE)
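
The `# pylint: disable=unused-argument` comment above exists because the pytest `setup_module` hook is conventionally written with a `module` parameter even when the body never touches it, which would otherwise trigger W0613. Disables can be scoped to the rest of a module or to a single line; a brief sketch with hypothetical hook bodies:

# Module-scoped pragma: applies from here to the end of the file.
# pylint: disable=unused-argument

def setup_module(module):
    """pytest hook; the `module` argument comes from the hook signature."""
    print("setting up", __name__)


def teardown_module(module):  # pylint: disable=unused-argument
    """Same idea, shown with a line-scoped pragma instead."""
    print("tearing down", __name__)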

@@ -16,6 +16,7 @@
resnet50 example
"""
import numpy as np
+
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P

@@ -16,19 +16,21 @@
test assign add
"""
import numpy as np
+
+import mindspore as ms
+import mindspore.context as context
import mindspore.nn as nn
-from mindspore.ops import operations as P
-from mindspore.common.initializer import initializer
from mindspore import Tensor, Parameter
-import mindspore as ms
+from mindspore.common.initializer import initializer
+from mindspore.ops import operations as P
from ..ut_filter import non_graph_engine
-from mindspore.common.api import _executor
-import mindspore.context as context
-import pytest

context.set_context(mode=context.GRAPH_MODE)

+
class Net(nn.Cell):
    """Net definition"""
+
    def __init__(self):
        super(Net, self).__init__()
        self.AssignAdd = P.AssignAdd()
@@ -39,18 +41,19 @@ class Net(nn.Cell):
        out = self.AssignAdd(self.inputdata, x)
        return out

+
@non_graph_engine
def test_AssignAdd_1():
    """test AssignAdd 1"""
    import mindspore.context as context
    context.set_context(mode=context.GRAPH_MODE)
    net = Net()
-    x = Tensor(np.ones([1]).astype(np.int64)*100)
+    x = Tensor(np.ones([1]).astype(np.int64) * 100)
    print("MyPrintResult dataX:", x)
    result = net(x)
    print("MyPrintResult data::", result)
-    expect = np.ones([1]).astype(np.int64)*101
+    expect = np.ones([1]).astype(np.int64) * 101
    diff = result.asnumpy() - expect
    print("MyPrintExpect:", expect)
@@ -58,18 +61,19 @@ def test_AssignAdd_1():
    error = np.ones(shape=[1]) * 1.0e-3
    assert np.all(diff < error)

+
@non_graph_engine
def test_AssignAdd_2():
    """test AssignAdd 2"""
    import mindspore.context as context
    context.set_context(mode=context.GRAPH_MODE)
    net = Net()
-    x = Tensor(np.ones([1]).astype(np.int64)*102)
+    x = Tensor(np.ones([1]).astype(np.int64) * 102)
    print("MyPrintResult dataX:", x)
    result = net(x)
    print("MyPrintResult data::", result.asnumpy())
-    expect = np.ones([1]).astype(np.int64)*103
+    expect = np.ones([1]).astype(np.int64) * 103
    diff = result.asnumpy() - expect
    print("MyPrintExpect:", expect)
@@ -77,8 +81,10 @@ def test_AssignAdd_2():
    error = np.ones(shape=[1]) * 1.0e-3
    assert np.all(diff < error)

+
class AssignAddNet(nn.Cell):
    """Net definition"""
+
    def __init__(self):
        super(AssignAddNet, self).__init__()
        self.AssignAdd = P.AssignAdd()
@@ -89,9 +95,10 @@ class AssignAddNet(nn.Cell):
        z1 = self.AssignAdd(self.inputdata, self.one)
        return z1

+
@non_graph_engine
def test_assignadd_scalar_cast():
    net = AssignAddNet()
-    x = Tensor(np.ones([1]).astype(np.int64)*102)
-    #_executor.compile(net, 1)
+    x = Tensor(np.ones([1]).astype(np.int64) * 102)
+    # _executor.compile(net, 1)
    result = net(x)
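
Besides the import shuffle, the hunks above add spaces around `*` and after the `#` in `# _executor.compile(net, 1)`. Older pylint releases flagged the former as bad-whitespace (C0326, removed around pylint 2.6); the comment spacing follows the usual PEP 8 style for comments. A tiny before/after sketch, not taken from the diff:

import numpy as np

# Old style: no space around the operator, no space after the comment marker.
#expect = np.ones([1]).astype(np.int64)*101

# Cleaned-up style, as applied throughout this commit:
expect = np.ones([1]).astype(np.int64) * 101
print(expect)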

@@ -14,6 +14,7 @@
# ============================================================================
""" test Activations """
import numpy as np
+
import mindspore.nn as nn
from mindspore import Tensor
from ..ut_filter import non_graph_engine

@@ -16,15 +16,17 @@
test assign sub
"""
import numpy as np
+
+import mindspore.context as context
import mindspore.nn as nn
import mindspore.ops.operations as P
from mindspore import Tensor
-import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter

context.set_context(mode=context.GRAPH_MODE)

+
class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()

@@ -14,6 +14,7 @@
# ============================================================================
"""ut for batchnorm layer"""
import numpy as np
+
import mindspore.nn as nn
from mindspore import Tensor
from ..ut_filter import non_graph_engine

@@ -14,14 +14,17 @@
# ============================================================================
""" test BiasAdd """
import numpy as np
+
import mindspore.nn as nn
-from mindspore.ops import operations as P
-from mindspore.common.initializer import initializer
from mindspore import Tensor, Parameter
+from mindspore.common.initializer import initializer
+from mindspore.ops import operations as P
from ..ut_filter import non_graph_engine

+
class Net(nn.Cell):
    """Net definition"""
+
    def __init__(self,
                 output_channels,
                 bias_init='zeros',

@@ -14,6 +14,7 @@
# ============================================================================
"""test conv"""
import numpy as np
+
import mindspore.nn as nn
from mindspore import Tensor
from ..ut_filter import non_graph_engine
@@ -25,6 +26,7 @@ out_channels = 64
class Net(nn.Cell):
    """Net definition"""
+
    def __init__(self,
                 cin,
                 cout,
@@ -70,6 +72,7 @@ def test_compile2():
    output = net(input_data)
    print(output.asnumpy())

+
@non_graph_engine
def test_compile3():
    net = Net(3, 1, (3, 3), weight_init='ONES')

@@ -14,12 +14,15 @@
# ============================================================================
""" test Dense """
import numpy as np
+
import mindspore.nn as nn
from mindspore import Tensor
from ..ut_filter import non_graph_engine

+
class Net(nn.Cell):
    """Net definition"""
+
    def __init__(self,
                 input_channels,
                 output_channels,

@@ -14,11 +14,12 @@
# ============================================================================
"""test eval"""
import numpy as np
+
import mindspore as ms
import mindspore.nn as nn
-from mindspore.common.api import _executor
from mindspore import Tensor
from mindspore import context
+from mindspore.common.api import _executor
from ..ut_filter import non_graph_engine

@@ -16,8 +16,8 @@
import numpy as np
import mindspore.nn as nn
-from mindspore.ops import operations as P
from mindspore import Tensor
+from mindspore.ops import operations as P
from ..ut_filter import non_graph_engine

@@ -15,12 +15,12 @@
"""
test pooling api
"""
-import numpy as np
import mindspore.nn as nn
-from mindspore import Tensor

+
class MaxNet(nn.Cell):
    """MaxNet definition"""
+
    def __init__(self,
                 kernel_size,
                 stride=None):

@@ -16,9 +16,11 @@
test softmax api
"""
import numpy as np
+
import mindspore.nn as nn
from mindspore import Tensor

+
class Net(nn.Cell):
    def __init__(self, dim):
        super(Net, self).__init__()

@@ -14,10 +14,12 @@
# ============================================================================
""" test TensorAdd """
import numpy as np
+
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P

+
class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()

Some files were not shown because too many files have changed in this diff.