From fa519433ef94b0b5481af12aa65adf977077bf22 Mon Sep 17 00:00:00 2001 From: looop5 Date: Mon, 7 Dec 2020 15:07:48 +0800 Subject: [PATCH] expand ClipByNormNoDivSum --- .../graph_kernel/expanders/__init__.py | 1 + .../expanders/clip_by_norm_no_div_sum.py | 51 +++++++++++++++++++ .../graph_kernel/graph_kernel_helper.cc | 1 + mindspore/core/base/core_ops.h | 1 + 4 files changed, 54 insertions(+) create mode 100644 mindspore/_extends/graph_kernel/expanders/clip_by_norm_no_div_sum.py diff --git a/mindspore/_extends/graph_kernel/expanders/__init__.py b/mindspore/_extends/graph_kernel/expanders/__init__.py index 6d84481ee6..848c25d077 100644 --- a/mindspore/_extends/graph_kernel/expanders/__init__.py +++ b/mindspore/_extends/graph_kernel/expanders/__init__.py @@ -34,3 +34,4 @@ from .logsoftmax_grad import expand_logsoftmaxgrad from .gkdropout import expand_gkdropout from .tile import expand_tile from .sqrt_grad import expand_sqrtgrad +from .clip_by_norm_no_div_sum import expand_clipbynormnodivsum diff --git a/mindspore/_extends/graph_kernel/expanders/clip_by_norm_no_div_sum.py b/mindspore/_extends/graph_kernel/expanders/clip_by_norm_no_div_sum.py new file mode 100644 index 0000000000..ae5013e8cf --- /dev/null +++ b/mindspore/_extends/graph_kernel/expanders/clip_by_norm_no_div_sum.py @@ -0,0 +1,51 @@ +# Copyright 2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
"""generate json desc for ClipByNormNoDivSum"""
from mindspore._extends.graph_kernel.model import model_builder as builder


def expand_clipbynormnodivsum(expand_info):
    """Expand ClipByNormNoDivSum into basic ops.

    Emits the graph:
        gt   = Greater(x0, x1)
        safe = Select(gt, x0, x2)   # pick x2 where x0 <= x1 before Sqrt
        root = Sqrt(safe)
        sel  = Select(gt, root, x0)
        out  = Maximum(sel, x3)
    """
    # The op carries four input descriptors (x0..x3) in order.
    descs = expand_info['input_desc']
    gb = builder.GraphBuilder()

    # Build the replacement subgraph.
    with gb.graph_scope('main') as scope:
        # One input tensor per incoming descriptor.
        x0, x1, x2, x3 = (gb.tensor(d['shape'], d['data_type'], d['format'])
                          for d in descs[:4])
        scope.set_input(x0, x1, x2, x3)

        # Compute the clipped-norm result; 'fusion' attrs tag the
        # Greater/Select group for downstream fusion passes.
        gt = gb.emit('Greater', [x0, x1], attrs={'fusion': 'SelectGT_000'})
        sel0 = gb.emit('Select', [gt, x0, x2],
                       attrs={'fusion': 'SelectGT_000_end'})
        root = gb.emit('Sqrt', [sel0])
        sel1 = gb.emit('Select', [gt, root, x0],
                       attrs={'fusion': 'SelectGT_000_end'})
        result = gb.emit('Maximum', [sel1, x3])

        # Register the graph output.
        scope.set_output(result)

    return gb.get()[0]