Add Softplus and SoftplusGrad for the old backend.

Branch: pull/7479/head
Author: liangchenghui (4 years ago)
Parent: 9db4900837
Commit: b4fda82ee5

@@ -78,6 +78,8 @@ constexpr const char kNameApplyAdam[] = "Adam";
constexpr const char kNameExtractImagePatches[] = "ExtractImagePatches";
constexpr const char kNameReLU6[] = "ReLU6";
constexpr const char kNameReLU6Grad[] = "ReLU6Grad";
constexpr const char kNameSoftplus[] = "Softplus";
constexpr const char kNameSoftplusGrad[] = "SoftplusGrad";
constexpr const char kNameElu[] = "Elu";
constexpr const char kNameEluGrad[] = "EluGrad";
constexpr const char kNameTensorScatterUpdate[] = "TensorScatterUpdate";

@@ -71,6 +71,18 @@ ATTR_MAP(Relu6Grad) = EMPTY_ATTR_MAP;
OUTPUT_MAP(Relu6Grad) = {{0, OUTPUT_DESC(backprops)}};
REG_ADPT_DESC(Relu6Grad, kNameReLU6Grad, ADPT_DESC(Relu6Grad))
// Softplus
INPUT_MAP(Softplus) = {{1, INPUT_DESC(x)}};
ATTR_MAP(Softplus) = EMPTY_ATTR_MAP;
OUTPUT_MAP(Softplus) = {{0, OUTPUT_DESC(y)}};
REG_ADPT_DESC(Softplus, kNameSoftplus, ADPT_DESC(Softplus))
// SoftplusGrad
INPUT_MAP(SoftplusGrad) = {{1, INPUT_DESC(gradients)}, {2, INPUT_DESC(features)}};
ATTR_MAP(SoftplusGrad) = EMPTY_ATTR_MAP;
OUTPUT_MAP(SoftplusGrad) = {{0, OUTPUT_DESC(backprops)}};
REG_ADPT_DESC(SoftplusGrad, kNameSoftplusGrad, ADPT_DESC(SoftplusGrad))
// ReluGrad
INPUT_MAP(ReluGrad) = {{1, INPUT_DESC(gradients)}, {2, INPUT_DESC(features)}};
ATTR_MAP(ReluGrad) = EMPTY_ATTR_MAP;
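
Note (for context, not part of the diff): Softplus takes a single input x and produces y, while SoftplusGrad combines the incoming gradients with the original forward input (features), which is why its INPUT_MAP lists both. The underlying math is the standard softplus function and its derivative, the logistic sigmoid:

\[
\mathrm{softplus}(x) = \ln\bigl(1 + e^{x}\bigr),
\qquad
\frac{d}{dx}\,\mathrm{softplus}(x) = \frac{1}{1 + e^{-x}} = \sigma(x),
\]

so the backward op effectively computes backprops = gradients * sigma(features).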

@@ -19,8 +19,8 @@
#include <string>
#include <unordered_map>
#include "transform/graph_ir/op_declare/op_declare_macro.h"
#include "ops/nonlinear_fuc_ops.h"
#include "transform/graph_ir/op_declare/op_declare_macro.h"
namespace mindspore::transform {
DECLARE_OP_ADAPTER(ReluGrad)
@@ -32,6 +32,12 @@ DECLARE_OP_USE_OUTPUT(Relu6)
DECLARE_OP_ADAPTER(Relu6Grad)
DECLARE_OP_USE_OUTPUT(Relu6Grad)
DECLARE_OP_ADAPTER(Softplus)
DECLARE_OP_USE_OUTPUT(Softplus)
DECLARE_OP_ADAPTER(SoftplusGrad)
DECLARE_OP_USE_OUTPUT(SoftplusGrad)
DECLARE_OP_ADAPTER(Tanh)
DECLARE_OP_USE_OUTPUT(Tanh)
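
Once these adapters are declared here and registered in the .cc file above, the front-end Softplus primitive can be lowered through the GE (old) backend. A minimal usage sketch follows; it assumes the MindSpore Python front-end of that era (mindspore.ops.operations.Softplus, context.set_context) and is an illustration only, not part of this commit:

```python
# Minimal sketch (not part of this commit): running Softplus through the
# MindSpore front-end so the new GE adapter is exercised.
import numpy as np
from mindspore import Tensor, context
from mindspore.ops import operations as P

# Assumption: the "old backend" is the GE path used for the Ascend target.
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

softplus = P.Softplus()  # forward op, mapped via kNameSoftplus
x = Tensor(np.array([-1.0, 0.0, 1.0], np.float32))
y = softplus(x)          # y = ln(1 + exp(x)); the backward pass uses SoftplusGrad
```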
