Rename Davinci to Ascend in the ops ST test module

pull/578/head
leonwanghui 5 years ago
parent 38ad56738c
commit b78b18e669

@ -1,44 +1,44 @@
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from mindspore import Tensor from mindspore import Tensor
from mindspore.ops import operations as P from mindspore.ops import operations as P
import mindspore.nn as nn import mindspore.nn as nn
from mindspore.common.api import ms_function from mindspore.common.api import ms_function
import numpy as np import numpy as np
import mindspore.context as context import mindspore.context as context
from mindspore.common.initializer import initializer from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter from mindspore.common.parameter import Parameter
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
class Net(nn.Cell):
    """Cell that applies a single ApplyMomentum update step.

    All operands (variable, accumulation, learning rate, gradient and
    momentum) are randomly initialized Parameters held by the cell, so
    ``construct`` takes no inputs and returns the updated variable.
    """

    def __init__(self):
        super(Net, self).__init__()
        # gradient_scale matches loss-scale style scaling of the gradient.
        self.apply_momentum = P.ApplyMomentum(gradient_scale=1024.0)
        self.variable = Parameter(initializer('normal', [2, 3, 3, 4]), name='variable')
        self.accumulation = Parameter(initializer('normal', [2, 3, 3, 4]), name='accumulation')
        self.learning_rate = Parameter(initializer('normal', [1, ]), name='learning_rate')
        self.gradient = Parameter(initializer('normal', [2, 3, 3, 4]), name='gradient')
        self.momentum = Parameter(initializer('normal', [1, ]), name='momentum')

    def construct(self):
        return self.apply_momentum(self.variable, self.accumulation,
                                   self.learning_rate, self.gradient,
                                   self.momentum)
def test_net():
    """Smoke test: build the ApplyMomentum net, run one step, dump the result."""
    net = Net()
    result = net()
    print(result.asnumpy())

@ -1,42 +1,42 @@
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from mindspore import Tensor from mindspore import Tensor
from mindspore.ops import operations as P from mindspore.ops import operations as P
from mindspore.ops.operations import _grad_ops as G from mindspore.ops.operations import _grad_ops as G
import mindspore.nn as nn import mindspore.nn as nn
from mindspore.common.api import ms_function from mindspore.common.api import ms_function
import numpy as np import numpy as np
import mindspore.context as context import mindspore.context as context
from mindspore.common.initializer import initializer from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter from mindspore.common.parameter import Parameter
context.set_context(device_target="Ascend") context.set_context(device_target="Ascend")
class Net(nn.Cell):
    """Thin cell wrapping the BiasAddGrad grad op.

    ``construct`` reduces the incoming gradient ``dout`` with BiasAddGrad
    and returns the per-channel bias gradient.
    """
    # NOTE(review): removed commented-out dead code that declared an unused
    # ``dout`` Parameter — the gradient is supplied as a construct argument.

    def __init__(self):
        super(Net, self).__init__()
        self.bias_add_grad = G.BiasAddGrad()

    @ms_function
    def construct(self, dout):
        return self.bias_add_grad(dout)
# Script-style check: an all-ones gradient of shape (2, 3, 4, 4) must reduce
# to 32 per channel (2 * 4 * 4 ones summed for each of the 3 channels).
dout = np.ones([2, 3, 4, 4]).astype(np.float32)
bias_add_grad = Net()
output = bias_add_grad(Tensor(dout))
expect_output = np.array([32., 32., 32.]).astype(np.float32)
assert np.array_equal(output.asnumpy(), expect_output), "bias_add_grad execute failed, please check current code commit"
print(output.asnumpy())

@ -1,39 +1,39 @@
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from mindspore import Tensor from mindspore import Tensor
from mindspore.ops import operations as P from mindspore.ops import operations as P
from mindspore.ops.operations import _grad_ops as G from mindspore.ops.operations import _grad_ops as G
import mindspore.nn as nn import mindspore.nn as nn
from mindspore.common.api import ms_function from mindspore.common.api import ms_function
import numpy as np import numpy as np
import mindspore.context as context import mindspore.context as context
from mindspore.common.initializer import initializer from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter from mindspore.common.parameter import Parameter
context.set_context(device_target="Ascend") context.set_context(device_target="Ascend")
class Net(nn.Cell):
    """Cell that forwards its input gradient through BiasAddGrad."""

    def __init__(self):
        super(Net, self).__init__()
        self.bias_add_grad = G.BiasAddGrad()

    @ms_function
    def construct(self, dout):
        # Reduce dout to the per-channel bias gradient.
        return self.bias_add_grad(dout)
def test_net():
    """Feed a random (1, 1001) gradient through BiasAddGrad and dump the result."""
    dout = np.random.rand(1, 1001).astype(np.float32)
    bias_add_grad = Net()
    # Wrap the numpy array in Tensor: graph-mode (@ms_function) cells expect
    # MindSpore tensors, matching the sibling BiasAddGrad ST test.
    output = bias_add_grad(Tensor(dout))
    print(output.asnumpy())

@ -1,44 +1,44 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from mindspore import Tensor from mindspore import Tensor
from mindspore.ops import operations as P from mindspore.ops import operations as P
import mindspore.nn as nn import mindspore.nn as nn
import numpy as np import numpy as np
import mindspore.context as context import mindspore.context as context
context.set_context(mode=context.GRAPH_MODE, context.set_context(mode=context.GRAPH_MODE,
device_target="Ascend") device_target="Ascend")
class Net(nn.Cell):
    """Generates a dropout byte mask with DropoutGenMask from an input's shape."""

    def __init__(self):
        super(Net, self).__init__()
        # Seeds (10, 28) keep the generated mask reproducible.
        self.mask = P.DropoutGenMask(10, 28)
        self.shape = P.Shape()

    def construct(self, x, y):
        # x only contributes its shape; y is the keep probability.
        return self.mask(self.shape(x), y)
# Test inputs: x supplies the mask shape; y is keep_prob = 1.0 (keep all).
x = np.ones((2, 4, 2, 2)).astype(np.int32)
y = np.array([1.0], dtype=np.float32)
def test_net():
    """With keep_prob = 1.0, every mask byte must be 0xFF (all bits kept)."""
    net = Net()
    output = net(Tensor(x), Tensor(y))
    print(output.asnumpy())
    assert (output.asnumpy() == [255, 255, 255, 255]).all()

Some files were not shown because too many files have changed in this diff. Show More

Loading…
Cancel
Save