!1695 Fix some minor Pylint issues in my code (ops)

Merge pull request !1695 from liuwenhao/master
pull/1695/MERGE
mindspore-ci-bot authored 5 years ago, committed by Gitee
commit 3ec8f9bb40
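
The hunks below remove imports that are never referenced, delete local variables that are assigned but never read, reorder imports, drop an unused return binding, and add a missing space after a comma — all typical Pylint findings (unused-import / W0611, unused-variable / W0612, plus import-order and whitespace checks). A minimal sketch of the unused-variable pattern, with illustrative names that are not taken from this PR:

import numpy as np


def before_cleanup():
    """Triggers Pylint W0612 (unused-variable): locals are bound but never read."""
    bidirectional = False
    num_directions = 1          # dead assignment -> W0612
    if bidirectional:
        num_directions = 2      # still never read afterwards
    return np.ones((2, 2)).sum()


def after_cleanup():
    """Same behaviour with the dead locals removed."""
    return np.ones((2, 2)).sum()

Pylint reports W0612 only for before_cleanup; after_cleanup keeps the computation and silences the warning.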

@@ -14,18 +14,17 @@
 # ============================================================================
 """multitype_ops directory test case"""
 import numpy as np
-from functools import partial, reduce
+import pytest
 import mindspore.nn as nn
 from mindspore import Tensor
 from mindspore import dtype as mstype
-from mindspore.ops import functional as F, composite as C
+from mindspore.ops import functional as F
 import mindspore.context as context
-import pytest

 class TensorIntAutoCast(nn.Cell):
-    def __init__(self, ):
+    def __init__(self,):
         super(TensorIntAutoCast, self).__init__()
         self.i = 2
@@ -35,7 +34,7 @@ class TensorIntAutoCast(nn.Cell):

 class TensorFPAutoCast(nn.Cell):
-    def __init__(self, ):
+    def __init__(self,):
         super(TensorFPAutoCast, self).__init__()
         self.f = 1.2
@@ -45,7 +44,7 @@ class TensorFPAutoCast(nn.Cell):

 class TensorBoolAutoCast(nn.Cell):
-    def __init__(self, ):
+    def __init__(self,):
         super(TensorBoolAutoCast, self).__init__()
         self.f = True
@@ -55,7 +54,7 @@ class TensorBoolAutoCast(nn.Cell):

 class TensorAutoCast(nn.Cell):
-    def __init__(self, ):
+    def __init__(self,):
         super(TensorAutoCast, self).__init__()

     def construct(self, t1, t2):
@@ -65,7 +64,7 @@ class TensorAutoCast(nn.Cell):

 def test_tensor_auto_cast():
     context.set_context(mode=context.GRAPH_MODE)
-    t0 = Tensor([True, False], mstype.bool_)
+    Tensor([True, False], mstype.bool_)
     t_uint8 = Tensor(np.ones([2, 1, 2, 2]), mstype.uint8)
     t_int8 = Tensor(np.ones([2, 1, 2, 2]), mstype.int8)
     t_int16 = Tensor(np.ones([2, 1, 2, 2]), mstype.int16)

@@ -13,7 +13,6 @@
 # limitations under the License.
 # ============================================================================
 """ test nn ops """
-import functools
 import numpy as np
 import mindspore.nn as nn
 import mindspore.common.dtype as mstype

@@ -14,10 +14,10 @@
 # ============================================================================
 import pytest
-import mindspore.nn as nn
-from mindspore.common.api import ms_function
 import numpy as np
+import mindspore.nn as nn
 import mindspore.context as context
+from mindspore.common.api import ms_function
 from mindspore.common.initializer import initializer
 from mindspore.ops import composite as C
 from mindspore.ops import operations as P
@@ -196,10 +196,6 @@ def test_multi_layer_bilstm():
     bidirectional = True
     dropout = 0.0
-    num_directions = 1
-    if bidirectional:
-        num_directions = 2
     net = MultiLayerBiLstmNet(seq_len, batch_size, input_size, hidden_size, num_layers, has_bias, bidirectional,
                               dropout)
     y, h, c, _, _ = net()
@@ -305,9 +301,6 @@ def test_grad():
     has_bias = True
     bidirectional = False
     dropout = 0.0
-    num_directions = 1
-    if bidirectional:
-        num_directions = 2
     net = Grad(Net(seq_len, batch_size, input_size, hidden_size, num_layers, has_bias, bidirectional, dropout))
     dy = np.array([[[-3.5471e-01, 7.0540e-01],
                     [2.7161e-01, 1.0865e+00]],

@@ -94,7 +94,7 @@ def test_random_crop_and_resize_op_py(plot=False):
     for item1, item2 in zip(data1.create_dict_iterator(), data2.create_dict_iterator()):
         crop_and_resize = (item1["image"].transpose(1, 2, 0) * 255).astype(np.uint8)
         original = (item2["image"].transpose(1, 2, 0) * 255).astype(np.uint8)
-        original = cv2.resize(original, (512,256))
+        original = cv2.resize(original, (512, 256))
         mse = diff_mse(crop_and_resize, original)
         logger.info("random_crop_and_resize_op_{}, mse: {}".format(num_iter + 1, mse))
         num_iter += 1

@@ -78,4 +78,4 @@ def test_layer_switch():
     net = MySwitchNet()
     x = Tensor(np.ones((3, 3, 24, 24)), mindspore.float32)
     index = Tensor(0, dtype=mindspore.int32)
-    y = net(x, index)
+    net(x, index)

@@ -28,7 +28,7 @@ from ....mindspore_test_framework.pipeline.forward.compile_forward \

 class AssignAddNet(nn.Cell):
-    def __init__(self, ):
+    def __init__(self,):
         super(AssignAddNet, self).__init__()
         self.op = P.AssignAdd()
         self.inputdata = Parameter(Tensor(np.zeros([1]).astype(np.bool_), mstype.bool_), name="assign_add1")
@@ -39,7 +39,7 @@ class AssignAddNet(nn.Cell):

 class AssignSubNet(nn.Cell):
-    def __init__(self, ):
+    def __init__(self,):
         super(AssignSubNet, self).__init__()
         self.op = P.AssignSub()
         self.inputdata = Parameter(Tensor(np.zeros([1]).astype(np.bool_), mstype.bool_), name="assign_sub1")
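
To double-check that the cleaned-up files stay quiet, one could rerun Pylint restricted to the two messages that drove most of these edits. A rough sketch; the file paths are placeholders, not the actual paths touched by !1695:

import subprocess

FILES = [
    "tests/ut/python/ops/test_multitype_ops.py",   # hypothetical paths
    "tests/ut/python/nn/test_lstm.py",
]

# Run Pylint with everything disabled except unused-import and unused-variable.
result = subprocess.run(
    ["pylint", "--disable=all", "--enable=unused-import,unused-variable", *FILES],
    capture_output=True,
    text=True,
    check=False,
)
print(result.stdout)
# Exit code 0 means Pylint emitted none of the enabled messages for these files.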
