parent: 4e85ca68e8
commit: b6e77e5178
@@ -0,0 +1,38 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

"""ConfusionMulGrad op"""
from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType

confusion_mul_grad_op_info = TBERegOp("ConfusionMulGrad") \
    .fusion_type("OPAQUE") \
    .attr("axis", "required", "listInt", "all") \
    .attr("keep_dims", "required", "bool", "all") \
    .input(0, "input0", False, "required", "all") \
    .input(1, "input1", False, "required", "all") \
    .input(2, "input2", False, "required", "all") \
    .output(0, "output0", False, "required", "all") \
    .output(1, "output1", False, "required", "all") \
    .dtype_format(DataType.F16_Default, DataType.F16_Default, DataType.F16_Default,
                  DataType.F16_Default, DataType.F16_Default) \
    .dtype_format(DataType.F32_Default, DataType.F32_Default, DataType.F32_Default,
                  DataType.F32_Default, DataType.F32_Default) \
    .get_op_info()


@op_info_register(confusion_mul_grad_op_info)
def _confusion_mul_grad_tbe():
    """ConfusionMulGrad TBE register"""
    return
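Aside (not part of the patch): the registration above only describes the interface of the fused kernel, not its computation. A minimal NumPy sketch of what ConfusionMulGrad is commonly understood to compute, stated here as an assumption since the TBE kernel itself lives outside this diff:

import numpy as np

def confusion_mul_grad_reference(input0, input1, input2, axis, keep_dims):
    # Assumed semantics: output0 is the elementwise product of the first two
    # inputs; output1 multiplies by the third input and reduces over `axis`.
    output0 = input0 * input1
    output1 = np.sum(input0 * input2, axis=tuple(axis), keepdims=keep_dims)
    return output0, output1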
@@ -0,0 +1,40 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

"""ReluGradV2 op"""
from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType

relu_grad_v2_op_info = TBERegOp("ReluGradV2") \
    .fusion_type("ELEMWISE") \
    .async_flag(False) \
    .binfile_name("relu_grad_v2.so") \
    .compute_cost(10) \
    .kernel_name("relu_grad_v2") \
    .partial_flag(True) \
    .input(0, "gradients", False, "required", "all") \
    .input(1, "mask", False, "required", "all") \
    .output(0, "backprops", False, "required", "all") \
    .dtype_format(DataType.F16_5HD, DataType.U8_Default, DataType.F16_5HD) \
    .dtype_format(DataType.F32_5HD, DataType.U8_Default, DataType.F32_5HD) \
    .dtype_format(DataType.I32_5HD, DataType.U8_Default, DataType.I32_5HD) \
    .dtype_format(DataType.I8_5HD, DataType.U8_Default, DataType.I8_5HD) \
    .dtype_format(DataType.U8_5HD, DataType.U8_Default, DataType.U8_5HD) \
    .get_op_info()


@op_info_register(relu_grad_v2_op_info)
def _relu_grad_v2_tbe():
    """ReluGradV2 TBE register"""
    return
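Reading note (not part of the patch): each dtype_format triple above pairs a 5HD gradient tensor with a uint8 mask and produces backprops in the same 5HD dtype. Ignoring the device-specific 5HD layout and the packed uint8 mask encoding, a minimal NumPy sketch of the assumed backward rule is:

import numpy as np

def relu_grad_v2_reference(gradients, mask):
    # Gradients pass through only where the forward activation was positive,
    # i.e. where the corresponding mask element is set.
    return np.where(mask != 0, gradients, np.zeros_like(gradients))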
@@ -0,0 +1,40 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

"""ReluV2 op"""
from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType

relu_v2_op_info = TBERegOp("ReLUV2") \
    .fusion_type("ELEMWISE") \
    .async_flag(False) \
    .binfile_name("relu_v2.so") \
    .compute_cost(10) \
    .kernel_name("relu_v2") \
    .partial_flag(True) \
    .input(0, "x", False, "required", "all") \
    .output(0, "y", False, "required", "all") \
    .output(1, "mask", False, "required", "all") \
    .dtype_format(DataType.F16_5HD, DataType.F16_5HD, DataType.U8_Default) \
    .dtype_format(DataType.F32_5HD, DataType.F32_5HD, DataType.U8_Default) \
    .dtype_format(DataType.I32_5HD, DataType.I32_5HD, DataType.U8_Default) \
    .dtype_format(DataType.I8_5HD, DataType.I8_5HD, DataType.U8_Default) \
    .dtype_format(DataType.U8_5HD, DataType.U8_5HD, DataType.U8_Default) \
    .get_op_info()


@op_info_register(relu_v2_op_info)
def _relu_v2_tbe():
    """ReluV2 TBE register"""
    return
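For orientation (not part of the patch): ReLUV2 produces the usual ReLU activation plus a uint8 mask that ReluGradV2 consumes in the backward pass; the test file below exercises the real primitive. A minimal NumPy sketch of the idea, ignoring the packed mask layout the Ascend kernel actually emits:

import numpy as np

def relu_v2_reference(x):
    # y is the ordinary ReLU output; mask flags the positive elements.
    y = np.maximum(x, 0)
    mask = (x > 0).astype(np.uint8)
    return y, mask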
@@ -0,0 +1,53 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from mindspore import Tensor
from mindspore.ops import operations as P
import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
from mindspore.ops.composite import GradOperation
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")


class Grad(nn.Cell):
    def __init__(self, network):
        super(Grad, self).__init__()
        self.grad = GradOperation(name="get_all", get_all=True)
        self.network = network

    @ms_function
    def construct(self, input):
        return self.grad(self.network)(input)


class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.relu_v2 = P.ReLUV2()

    def construct(self, x):
        return self.relu_v2(x)


def test_net():
    x = Tensor(np.ones((2, 3, 3, 4)).astype(np.float32))
    relu_net = Net()
    relu_output = relu_net(x)
    net = Grad(Net())
    output_grad = net(x)
    print(relu_output[0].asnumpy())
    print(relu_output[1].asnumpy())
    print(len(output_grad))
    print(output_grad[0].asnumpy())
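The test above only prints results. If value checks were wanted, a hedged sketch of an assertion-based variant (test_net_with_checks is a hypothetical addition, not part of the patch; with an all-ones input the ReLU output equals the input):

def test_net_with_checks():
    x_np = np.ones((2, 3, 3, 4)).astype(np.float32)
    relu_output = Net()(Tensor(x_np))
    # For a strictly positive input, ReLUV2's first output is the input itself.
    np.testing.assert_allclose(relu_output[0].asnumpy(), np.maximum(x_np, 0))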