!11384 update some ops' attr names

From: @yuchaojie
Reviewed-by: 
Signed-off-by:
pull/11384/MERGE
Committed by mindspore-ci-bot via Gitee, 4 years ago
commit e3f150d329
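
In brief, judging from the hunks shown below: the layout attribute data_format is renamed to format across the GPU kernels, the GPU graph-fusion passes, the GE operator adapters, a TBE op registration, and the shared constant kAttrDataFormat (now kAttrFormat); a replace map is added in PrimitivePy::AddPyAttr so attributes registered from Python under the old name are stored under the new one; and SmoothL1Loss/SmoothL1LossGrad rename their sigma attribute to beta. A comment typo (orignal → original) is fixed along the way.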

File diff suppressed because one or more lines are too long

@@ -83,7 +83,7 @@ class BiasAddGpuKernel : public GpuKernel {
       MS_LOG(EXCEPTION) << "input dims must be at least 2, but got " << num_dims;
     }
-    std::string format = GetAttr<std::string>(kernel_node, "data_format");
+    std::string format = GetAttr<std::string>(kernel_node, "format");
     string::size_type pos = format.find("C");
     if (pos == std::string::npos || pos >= num_dims) {
       MS_LOG(EXCEPTION) << "format '" << format << "' invalid";
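
As context for the unchanged lines in this hunk: the kernel derives the channel axis by searching the format string for 'C', then validates it against the input rank. A minimal standalone sketch of that lookup (plain C++, outside the kernel class):

```cpp
#include <iostream>
#include <string>

int main() {
  // "NCHW" -> channel axis 1, "NHWC" -> channel axis 3. A format with no 'C',
  // or with 'C' at/after the input rank, is rejected by the kernel above.
  for (const std::string format : {"NCHW", "NHWC"}) {
    std::string::size_type pos = format.find("C");
    std::cout << format << " -> channel axis " << pos << "\n";
  }
  return 0;
}
```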

@@ -78,7 +78,7 @@ class BiasAddGradGpuKernel : public GpuKernel {
       MS_LOG(EXCEPTION) << "input dims must be at least 2, but got " << num_dims;
     }
-    std::string format = GetAttr<std::string>(kernel_node, "data_format");
+    std::string format = GetAttr<std::string>(kernel_node, "format");
     string::size_type pos = format.find("C");
     if (pos == std::string::npos || pos >= num_dims) {
       MS_LOG(EXCEPTION) << "format '" << format << "' invalid";

@@ -86,7 +86,7 @@ class Conv2dGpuFwdKernel : public GpuKernel {
     }
     cudnn_data_type_ = GetCudnnDataType(TypeIdLabel(AnfAlgo::GetInputDeviceDataType(kernel_node, 0)));
     data_format_ = AnfAlgo::GetInputFormat(kernel_node, 0);
-    auto format_attr = GetAttr<std::string>(kernel_node, "data_format");
+    auto format_attr = GetAttr<std::string>(kernel_node, "format");
     if (format_attr == kOpFormat_NHWC) {
       data_format_ = kOpFormat_NHWC;
     }

@@ -119,7 +119,7 @@ class ConvGradFilterGpuBkwKernel : public GpuKernel {
       return true;
     }
     data_format_ = AnfAlgo::GetInputFormat(kernel_node, 0);
-    format_attr_ = GetAttr<std::string>(kernel_node, "data_format");
+    format_attr_ = GetAttr<std::string>(kernel_node, "format");
     if (format_attr_ == kOpFormat_NHWC) {
       data_format_ = kOpFormat_NHWC;
     }

@@ -109,7 +109,7 @@ class ConvGradInputGpuBkwKernel : public GpuKernel {
     }
     cudnn_data_type_ = GetCudnnDataType(TypeIdLabel(AnfAlgo::GetInputDeviceDataType(kernel_node, 0)));
     data_format_ = AnfAlgo::GetInputFormat(kernel_node, 0);
-    auto format_attr = GetAttr<std::string>(kernel_node, "data_format");
+    auto format_attr = GetAttr<std::string>(kernel_node, "format");
     if (format_attr == kOpFormat_NHWC) {
       data_format_ = kOpFormat_NHWC;
     }

@@ -133,7 +133,7 @@ class FusedBatchNormExGpuKernel : public GpuKernel {
       return true;
     }
     auto format = AnfAlgo::GetInputFormat(kernel_node, 0);
-    auto format_attr = GetAttr<std::string>(kernel_node, "data_format");
+    auto format_attr = GetAttr<std::string>(kernel_node, "format");
     if (format_attr == kOpFormat_NHWC) {
       format = kOpFormat_NHWC;
     }

@@ -104,7 +104,7 @@ class FusedBatchNormGpuKernel : public GpuKernel {
     }
     cudnnTensorFormat_t cudnn_format = CUDNN_TENSOR_NCHW;
     auto format = AnfAlgo::GetInputFormat(kernel_node, 0);
-    auto format_attr = GetAttr<std::string>(kernel_node, "data_format");
+    auto format_attr = GetAttr<std::string>(kernel_node, "format");
     if (format_attr == kOpFormat_NHWC) {
       format = kOpFormat_NHWC;
       cudnn_format = CUDNN_TENSOR_NHWC;

@@ -143,7 +143,7 @@ class FusedBatchNormGradExGpuKernel : public GpuKernel {
       return true;
     }
     std::string format = AnfAlgo::GetInputFormat(kernel_node, 0);
-    auto format_attr = GetAttr<std::string>(kernel_node, "data_format");
+    auto format_attr = GetAttr<std::string>(kernel_node, "format");
     if (format_attr == kOpFormat_NHWC) {
       format = kOpFormat_NHWC;
     }

@@ -80,7 +80,7 @@ class PoolingGpuFwdKernel : public GpuKernel {
     }
     cudnn_data_type_ = GetCudnnDataType(TypeIdLabel(AnfAlgo::GetInputDeviceDataType(kernel_node, 0)));
     data_format_ = AnfAlgo::GetInputFormat(kernel_node, 0);
-    auto format_attr = GetAttr<std::string>(kernel_node, "data_format");
+    auto format_attr = GetAttr<std::string>(kernel_node, "format");
     if (format_attr == kOpFormat_NHWC) {
       data_format_ = kOpFormat_NHWC;
     }

@@ -85,7 +85,7 @@ class PoolingGradGpuKernel : public GpuKernel {
     auto dout_shape = AnfAlgo::GetInputDeviceShape(kernel_node, 2);
     auto output_shape = AnfAlgo::GetOutputDeviceShape(kernel_node, 0);
     auto data_format = AnfAlgo::GetInputFormat(kernel_node, 0);
-    format_attr_ = GetAttr<std::string>(kernel_node, "data_format");
+    format_attr_ = GetAttr<std::string>(kernel_node, "format");
     if (format_attr_ == kOpFormat_NHWC) {
       data_format = kOpFormat_NHWC;
     }

@@ -45,7 +45,7 @@ bool NeedUpdate(const CNodePtr &conv2d, std::vector<size_t> in_shape, std::vecto
   if (group == 1) {
     return false;
   }
-  auto data_format = AnfAlgo::GetNodeAttr<std::string>(conv2d, kAttrDataFormat);
+  auto data_format = AnfAlgo::GetNodeAttr<std::string>(conv2d, kAttrFormat);
   if (data_format != "NCHW") {
     MS_LOG(EXCEPTION) << "Conv2D only supports NCHW when group > 1, but got " << data_format;
   }
@@ -199,7 +199,7 @@ CNodePtr CreateDepthwiseConv2DBackpropFilter(const FuncGraphPtr &graph, const CN
 void SetCommonAttrs(const CNodePtr &conv2d, const CNodePtr &depth_conv) {
   AnfAlgo::CopyNodeAttr(kAttrKernelSize, conv2d, depth_conv);
   AnfAlgo::CopyNodeAttr(kAttrDilation, conv2d, depth_conv);
-  AnfAlgo::CopyNodeAttr(kAttrDataFormat, conv2d, depth_conv);
+  AnfAlgo::CopyNodeAttr(kAttrFormat, conv2d, depth_conv);
   AnfAlgo::CopyNodeAttr(kAttrPadList, kAttrPads, conv2d, depth_conv);
   AnfAlgo::CopyNodeAttr(kAttrPadMode, conv2d, depth_conv);
   AnfAlgo::CopyNodeAttr(kAttrPad, conv2d, depth_conv);

@@ -46,7 +46,7 @@ const AnfNodePtr BatchNormAddReluFusion::Process(const FuncGraphPtr &graph, cons
   MS_EXCEPTION_IF_NULL(tuple_get_item);
   auto batch_norm_ex = AnfAlgo::GetInputNode(utils::cast<CNodePtr>(tuple_get_item), 0);
   MS_EXCEPTION_IF_NULL(batch_norm_ex);
-  auto format_attr = AnfAlgo::GetCNodePrimitive(batch_norm_ex)->GetAttr("data_format");
+  auto format_attr = AnfAlgo::GetCNodePrimitive(batch_norm_ex)->GetAttr("format");
   MS_EXCEPTION_IF_NULL(format_attr);
   auto format = GetValue<std::string>(format_attr);
   if (AnfAlgo::GetInputFormat(batch_norm_ex, 0) != kOpFormat_NHWC && format != "NHWC") {

@@ -97,7 +97,7 @@ void ReplaceOutput(const FuncGraphPtr &graph, const AnfNodePtr &bn_grad, const A
     return;
   }
-  // Replace orignal output
+  // Replace original output
   auto manager = graph->manager();
   MS_EXCEPTION_IF_NULL(manager);
   sort(bn_outputs.begin(), bn_outputs.end(), CompareTupleGetitem);
@@ -114,7 +114,7 @@ void ReplaceOutput(const FuncGraphPtr &graph, const AnfNodePtr &bn_grad, const A
 bool PatternCheck(const FuncGraphPtr &graph, const AnfNodePtr &node) {
   MS_EXCEPTION_IF_NULL(graph);
   MS_EXCEPTION_IF_NULL(node);
-  auto format_attr = AnfAlgo::GetCNodePrimitive(node)->GetAttr("data_format");
+  auto format_attr = AnfAlgo::GetCNodePrimitive(node)->GetAttr("format");
   MS_EXCEPTION_IF_NULL(format_attr);
   auto format = GetValue<std::string>(format_attr);
   if (AnfAlgo::GetInputFormat(node, 0) != kOpFormat_NHWC && format != "NHWC") {

@@ -43,7 +43,7 @@ const AnfNodePtr BatchNormReluFusion::Process(const FuncGraphPtr &graph, const A
   MS_EXCEPTION_IF_NULL(tuple_get_item);
   auto batch_norm_ex = AnfAlgo::GetInputNode(utils::cast<CNodePtr>(tuple_get_item), 0);
   MS_EXCEPTION_IF_NULL(batch_norm_ex);
-  auto format_attr = AnfAlgo::GetCNodePrimitive(batch_norm_ex)->GetAttr("data_format");
+  auto format_attr = AnfAlgo::GetCNodePrimitive(batch_norm_ex)->GetAttr("format");
   MS_EXCEPTION_IF_NULL(format_attr);
   auto format = GetValue<std::string>(format_attr);
   if (AnfAlgo::GetInputFormat(batch_norm_ex, 0) != kOpFormat_NHWC && format != "NHWC") {

@@ -39,7 +39,7 @@ const AnfNodePtr BatchNormReluGradFusion::Process(const FuncGraphPtr &graph, con
                                                   const EquivPtr &equiv) const {
   MS_EXCEPTION_IF_NULL(graph);
   MS_EXCEPTION_IF_NULL(node);
-  auto format_attr = AnfAlgo::GetCNodePrimitive(node)->GetAttr("data_format");
+  auto format_attr = AnfAlgo::GetCNodePrimitive(node)->GetAttr("format");
   MS_EXCEPTION_IF_NULL(format_attr);
   auto format = GetValue<std::string>(format_attr);
   auto ms_context = MsContext::GetInstance();

@@ -17,6 +17,7 @@
 #include "pybind_api/ir/primitive_py.h"
 #include <mutex>
+#include <map>
 #include "ir/signature.h"
 #include "pipeline/jit/parse/data_converter.h"
 #include "pipeline/jit/parse/python_adapter.h"
@@ -36,6 +37,9 @@ namespace {
 constexpr auto kBpropAttrName = "bprop";
 constexpr auto kCellHookAttrName = "cell_hook";
 constexpr auto kCellIDAttrName = "cell_id";
+std::map<std::string, std::string> kOpAttrNameReplaceMap = {
+  {"data_format", "format"},
+};
 void SyncData(const py::object &arg) {
   if (py::isinstance<py::tuple>(arg)) {
@@ -273,6 +277,9 @@ void PrimitivePy::AddPyAttr(const py::str &name, const py::object &obj) {
   if (!converted) {
     MS_LOG(EXCEPTION) << "Attribute convert error with type: " << std::string(py::str(obj));
   }
+  if (kOpAttrNameReplaceMap.find(attr_name) != kOpAttrNameReplaceMap.end()) {
+    attr_name = kOpAttrNameReplaceMap[attr_name];
+  }
   (void)this->AddAttr(attr_name, converted_ret);
 }

@@ -247,8 +247,8 @@ void UpdateKernelFormatInfo(const CNodePtr &kernel_node, const std::vector<TypeI
   }
   auto prim = AnfAlgo::GetCNodePrimitive(kernel_node);
   MS_EXCEPTION_IF_NULL(prim);
-  if (prim->HasAttr("data_format")) {
-    *origin_data_format = AnfAlgo::GetNodeAttr<std::string>(kernel_node, "data_format");
+  if (prim->HasAttr("format")) {
+    *origin_data_format = AnfAlgo::GetNodeAttr<std::string>(kernel_node, "format");
   }
 }
@@ -342,8 +342,8 @@ void FormatTransformChecker::CheckSupportFormatTransform(const std::shared_ptr<s
       return;
     }
     auto value = AnfAlgo::GetCNodePrimitive(kernel);
-    if (value != nullptr && value->GetAttr("data_format") != nullptr &&
-        GetValue<std::string>(value->GetAttr("data_format")) == kOpFormat_NHWC) {
+    if (value != nullptr && value->GetAttr("format") != nullptr &&
+        GetValue<std::string>(value->GetAttr("format")) == kOpFormat_NHWC) {
       format_transform_ = false;
       return;
     }
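
For context: per the second hunk above, CheckSupportFormatTransform scans the kernels and turns whole-graph format transformation off as soon as any kernel's format attr is NHWC. A minimal standalone sketch of that early-out scan (names simplified from the real checker):

```cpp
#include <iostream>
#include <string>
#include <vector>

// Hypothetical stand-in for the kernels' "format" attr values.
bool SupportsFormatTransform(const std::vector<std::string> &kernel_formats) {
  for (const auto &fmt : kernel_formats) {
    if (fmt == "NHWC") {
      return false;  // mirrors: format_transform_ = false; return;
    }
  }
  return true;
}

int main() {
  std::cout << SupportsFormatTransform({"NCHW", "NCHW"}) << "\n";  // 1
  std::cout << SupportsFormatTransform({"NCHW", "NHWC"}) << "\n";  // 0
  return 0;
}
```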

@@ -193,7 +193,7 @@ REG_ADPT_DESC(Exp, kNameExp, ADPT_DESC(Exp))
 // BiasAdd
 INPUT_MAP(BiasAdd) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(bias)}};
-ATTR_MAP(BiasAdd) = {{"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
+ATTR_MAP(BiasAdd) = {{"format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
 OUTPUT_MAP(BiasAdd) = {{0, OUTPUT_DESC(y)}};
 REG_ADPT_DESC(BiasAdd, kNameBiasAdd, ADPT_DESC(BiasAdd))
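
Note the direction of the mapping in this and the following adapter hunks: the string key is the MindSpore primitive attr (the renamed side), while the ATTR_DESC target stays data_format, the attribute name on the GE operator, so the exported GE graph is unchanged. A hypothetical illustration of that key/target split (not the real ATTR_MAP macro):

```cpp
#include <iostream>
#include <map>
#include <string>

int main() {
  // Key: MindSpore attr name (renamed); value: GE operator attr it feeds.
  const std::map<std::string, std::string> bias_add_attr_to_ge = {
    {"format", "data_format"},
  };
  for (const auto &kv : bias_add_attr_to_ge) {
    std::cout << kv.first << " -> " << kv.second << "\n";  // format -> data_format
  }
  return 0;
}
```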

@@ -24,7 +24,7 @@ INPUT_MAP(BatchNorm) = {{1, INPUT_DESC(x)},
                         {3, INPUT_DESC(offset)},
                         {4, INPUT_DESC(mean)},
                         {5, INPUT_DESC(variance)}};
-ATTR_MAP(BatchNorm) = {{"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())},
+ATTR_MAP(BatchNorm) = {{"format", ATTR_DESC(data_format, AnyTraits<std::string>())},
                        {"epsilon", ATTR_DESC(epsilon, AnyTraits<float>())},
                        {"is_training", ATTR_DESC(is_training, AnyTraits<bool>())}};
 OUTPUT_MAP(BatchNorm) = {{0, OUTPUT_DESC(y)},
@@ -40,7 +40,7 @@ INPUT_MAP(BatchNormGrad) = {{1, INPUT_DESC(y_backprop)},
                             {3, INPUT_DESC(scale)},
                             {4, INPUT_DESC(reserve_space_1)},
                             {5, INPUT_DESC(reserve_space_2)}};
-ATTR_MAP(BatchNormGrad) = {{"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())},
+ATTR_MAP(BatchNormGrad) = {{"format", ATTR_DESC(data_format, AnyTraits<std::string>())},
                            {"epsilon", ATTR_DESC(epsilon, AnyTraits<float>())},
                            {"is_training", ATTR_DESC(is_training, AnyTraits<bool>())}};
 OUTPUT_MAP(BatchNormGrad) = {{0, OUTPUT_DESC(x_backprop)},

@@ -20,7 +20,7 @@
 namespace mindspore::transform {
 // BiasAddGrad
 INPUT_MAP(BiasAddGrad) = {{1, INPUT_DESC(x)}};
-ATTR_MAP(BiasAddGrad) = {{"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
+ATTR_MAP(BiasAddGrad) = {{"format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
 OUTPUT_MAP(BiasAddGrad) = {{0, OUTPUT_DESC(y)}};
 REG_ADPT_DESC(BiasAddGrad, prim::kPrimBiasAddGrad->name(), ADPT_DESC(BiasAddGrad))
@@ -30,7 +30,7 @@ ATTR_MAP(Conv2D) = {
   {"stride", ATTR_DESC(strides, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
   {"pad_list", ATTR_DESC(pads, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
   {"dilation", ATTR_DESC(dilations, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
-  {"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())},
+  {"format", ATTR_DESC(data_format, AnyTraits<std::string>())},
   {"group", ATTR_DESC(groups, AnyTraits<int64_t>())},
 };
 OUTPUT_MAP(Conv2D) = {{0, OUTPUT_DESC(y)}};
@@ -44,7 +44,7 @@ ATTR_MAP(Conv2DBackpropInputD) = {
   {"pad_list", ATTR_DESC(pads, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
   {"stride", ATTR_DESC(strides, "pad", AnyTraits<std::vector<int64_t>>())},
   {"dilation", ATTR_DESC(dilations, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
-  {"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())},
+  {"format", ATTR_DESC(data_format, AnyTraits<std::string>())},
   {"group", ATTR_DESC(groups, AnyTraits<int64_t>())},
 };
 OUTPUT_MAP(Conv2DBackpropInputD) = {{0, OUTPUT_DESC(y)}};
@@ -58,7 +58,7 @@ ATTR_MAP(Conv2DBackpropFilterD) = {
   {"pad_list", ATTR_DESC(pads, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
   {"stride", ATTR_DESC(strides, "pad", AnyTraits<std::vector<int64_t>>())},
   {"dilation", ATTR_DESC(dilations, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
-  {"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())},
+  {"format", ATTR_DESC(data_format, AnyTraits<std::string>())},
   {"group", ATTR_DESC(groups, AnyTraits<int64_t>())},
 };
 OUTPUT_MAP(Conv2DBackpropFilterD) = {{0, OUTPUT_DESC(y)}};
@@ -70,7 +70,7 @@ ATTR_MAP(DepthwiseConv2D) = {
   {"stride", ATTR_DESC(strides, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
   {"pads", ATTR_DESC(pads, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
   {"dilation", ATTR_DESC(dilations, AnyTraits<std::vector<int64_t>>(), AnyTraits<std::vector<int64_t>>())},
-  {"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())},
+  {"format", ATTR_DESC(data_format, AnyTraits<std::string>())},
 };
 OUTPUT_MAP(DepthwiseConv2D) = {{0, OUTPUT_DESC(y)}};
 REG_ADPT_DESC(DepthwiseConv2D, prim::kPrimDepthwiseConv2dNative->name(), ADPT_DESC(DepthwiseConv2D))

@@ -41,13 +41,13 @@ REG_ADPT_DESC(SoftmaxCrossEntropyWithLogits, prim::kPrimSoftmaxCrossEntropyWithL
 // SmoothL1Loss
 INPUT_MAP(SmoothL1Loss) = {{1, INPUT_DESC(predict)}, {2, INPUT_DESC(label)}};
-ATTR_MAP(SmoothL1Loss) = {{"sigma", ATTR_DESC(sigma, AnyTraits<float>())}};
+ATTR_MAP(SmoothL1Loss) = {{"beta", ATTR_DESC(sigma, AnyTraits<float>())}};
 OUTPUT_MAP(SmoothL1Loss) = {{0, OUTPUT_DESC(loss)}};
 REG_ADPT_DESC(SmoothL1Loss, kNameSmoothL1Loss, ADPT_DESC(SmoothL1Loss))
 
 // SmoothL1LossGrad
 INPUT_MAP(SmoothL1LossGrad) = {{1, INPUT_DESC(predict)}, {2, INPUT_DESC(label)}, {3, INPUT_DESC(dout)}};
-ATTR_MAP(SmoothL1LossGrad) = {{"sigma", ATTR_DESC(sigma, AnyTraits<float>())}};
+ATTR_MAP(SmoothL1LossGrad) = {{"beta", ATTR_DESC(sigma, AnyTraits<float>())}};
 OUTPUT_MAP(SmoothL1LossGrad) = {{0, OUTPUT_DESC(gradient)}};
 REG_ADPT_DESC(SmoothL1LossGrad, kNameSmoothL1LossGrad, ADPT_DESC(SmoothL1LossGrad))
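
Unlike the layout renames elsewhere in this PR, these two hunks rename a loss attribute: the MindSpore-side sigma becomes beta, while ATTR_DESC still targets the GE operator's sigma attribute.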

@@ -23,7 +23,7 @@ INPUT_MAP(MaxPool) = {{1, INPUT_DESC(x)}};
 ATTR_MAP(MaxPool) = {{"kernel_size", ATTR_DESC(ksize, AnyTraits<int64_t>(), AnyTraits<std::vector<int64_t>>())},
                      {"strides", ATTR_DESC(strides, AnyTraits<int64_t>(), AnyTraits<std::vector<int64_t>>())},
                      {"pad_mode", ATTR_DESC(padding, AnyTraits<std::string>())},
-                     {"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
+                     {"format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
 OUTPUT_MAP(MaxPool) = {{0, OUTPUT_DESC(y)}};
 REG_ADPT_DESC(MaxPool, kNameMaxPool, ADPT_DESC(MaxPool))
@@ -32,7 +32,7 @@ INPUT_MAP(AvgPool) = {{1, INPUT_DESC(x)}};
 ATTR_MAP(AvgPool) = {{"kernel_size", ATTR_DESC(ksize, AnyTraits<int64_t>(), AnyTraits<std::vector<int64_t>>())},
                      {"strides", ATTR_DESC(strides, AnyTraits<int64_t>(), AnyTraits<std::vector<int64_t>>())},
                      {"pad_mode", ATTR_DESC(padding, AnyTraits<std::string>())},
-                     {"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
+                     {"format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
 OUTPUT_MAP(AvgPool) = {{0, OUTPUT_DESC(y)}};
 REG_ADPT_DESC(AvgPool, kNameAvgPool, ADPT_DESC(AvgPool))
@@ -41,7 +41,7 @@ INPUT_MAP(MaxPoolGrad) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}, {3, INPUT_DE
 ATTR_MAP(MaxPoolGrad) = {{"kernel_size", ATTR_DESC(ksize, AnyTraits<int64_t>(), AnyTraits<std::vector<int64_t>>())},
                          {"strides", ATTR_DESC(strides, AnyTraits<int64_t>(), AnyTraits<std::vector<int64_t>>())},
                          {"pad_mode", ATTR_DESC(padding, AnyTraits<std::string>())},
-                         {"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
+                         {"format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
 OUTPUT_MAP(MaxPoolGrad) = {{0, OUTPUT_DESC(y)}};
 REG_ADPT_DESC(MaxPoolGrad, kNameMaxPoolGrad, ADPT_DESC(MaxPoolGrad))
@@ -50,7 +50,7 @@ INPUT_MAP(AvgPoolGrad) = {{1, INPUT_DESC(orig_input_shape)}, {2, INPUT_DESC(inpu
 ATTR_MAP(AvgPoolGrad) = {{"kernel_size", ATTR_DESC(ksize, AnyTraits<int64_t>(), AnyTraits<std::vector<int64_t>>())},
                          {"strides", ATTR_DESC(strides, AnyTraits<int64_t>(), AnyTraits<std::vector<int64_t>>())},
                          {"pad_mode", ATTR_DESC(padding, AnyTraits<std::string>())},
-                         {"data_format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
+                         {"format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
 OUTPUT_MAP(AvgPoolGrad) = {{0, OUTPUT_DESC(out_grad)}};
 REG_ADPT_DESC(AvgPoolGrad, kNameAvgPoolGrad, ADPT_DESC(AvgPoolGrad))

@@ -292,7 +292,7 @@ constexpr auto kAttrEpsilon = "epsilon";
 constexpr auto kAttrFactor = "factor";
 constexpr auto kAttrIsRef = "isRef";
 constexpr auto kAttrDataShape = "data_shape";
-constexpr auto kAttrDataFormat = "data_format";
+constexpr auto kAttrFormat = "format";
 constexpr auto kAttrAxis = "axis";
 constexpr auto kAttrKeepDims = "keep_dims";
 constexpr auto kAttrShapeGamma = "shape_gamma";

@@ -32,7 +32,7 @@ batch_norm_op_info = TBERegOp("BatchNormFoldD") \
     .attr("epsilon", "optional", "float", "all") \
     .attr("is_training", "optional", "bool", "all") \
     .attr("freeze_bn", "optional", "int", "all") \
-    .attr("data_format", "optional", "str", "all") \
+    .attr("format", "optional", "str", "all") \
     .input(0, "x", False, "required", "all") \
     .input(1, "x_sum", False, "required", "all") \
     .input(2, "x_square_sum", False, "required", "all") \

Some files were not shown because too many files have changed in this diff.
