parent 50b1cab122
commit b837689e97
@@ -0,0 +1,101 @@
// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/framework/ir/mkldnn/conv_activation_mkldnn_fuse_pass.h"
#include <string>
#include <vector>
#include "paddle/fluid/platform/enforce.h"

namespace paddle {
namespace framework {
namespace ir {

void ConvActivationFusePass::ApplyImpl(ir::Graph* graph) const {
  PADDLE_ENFORCE_NOT_NULL(graph, "graph cannot be nullptr.");
  FusePassBase::Init("conv_activation_mkldnn_fuse", graph);

  GraphPatternDetector gpd;
  auto* conv_input = gpd.mutable_pattern()
                         ->NewNode("conv_activation_mkldnn_fuse/conv_input")
                         ->AsInput()
                         ->assert_is_op_input(conv_type(), "Input");
  patterns::ConvActivation conv_activation_pattern(
      gpd.mutable_pattern(), "conv_activation_mkldnn_fuse");
  conv_activation_pattern(conv_input, conv_type(), activation_type());
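
  // Matched subgraph (node names as defined in patterns::ConvActivation):
  //
  //   conv_input -> conv -> conv_out -> activation -> activation_out
  //   conv_weight --^  (Filter)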

  int found_conv_activation_count = 0;
  auto handler = [&](const GraphPatternDetector::subgraph_t& subgraph,
                     Graph* g) {
    VLOG(4) << "handle " + conv_type() + "+" + activation_type() + " fuse";
    GET_IR_NODE_FROM_SUBGRAPH(conv_weight, conv_weight,
                              conv_activation_pattern);  // Filter
    GET_IR_NODE_FROM_SUBGRAPH(conv_out, conv_out,
                              conv_activation_pattern);  // tmp
    GET_IR_NODE_FROM_SUBGRAPH(conv, conv, conv_activation_pattern);  // CONV op
    GET_IR_NODE_FROM_SUBGRAPH(activation_out, activation_out,
                              conv_activation_pattern);  // Out
    GET_IR_NODE_FROM_SUBGRAPH(activation, activation,
                              conv_activation_pattern);  // Activation op

    // Transform Conv node into ConvActivation node.
    OpDesc* desc = conv->Op();
    desc->SetOutput("Output",
                    std::vector<std::string>({activation_out->Name()}));

    desc->SetAttr("fuse_activation", activation_type());
    // MKL-DNN kernels take generic alpha and beta activation parameters,
    // while Paddle activation ops use op-specific attribute names (e.g.
    // relu6 stores its upper bound in "threshold"), so the values are
    // mapped onto fuse_alpha/fuse_beta explicitly here.
    if (activation_type() == "relu6") {
      desc->SetAttr("fuse_alpha",
                    boost::get<float>(activation->Op()->GetAttr("threshold")));
    } else {
      desc->SetAttr("fuse_alpha",
                    activation->Op()->HasAttr("alpha")
                        ? boost::get<float>(activation->Op()->GetAttr("alpha"))
                        : 0.0f);
    }
    desc->SetAttr("fuse_beta",
                  activation->Op()->HasAttr("beta")
                      ? boost::get<float>(activation->Op()->GetAttr("beta"))
                      : 0.0f);
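    // Net effect for the activations registered below:
    //   relu:       fuse_alpha = 0,         fuse_beta = 0
    //   leaky_relu: fuse_alpha = alpha,     fuse_beta = 0
    //   relu6:      fuse_alpha = threshold, fuse_beta = 0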

    GraphSafeRemoveNodes(graph, {activation, conv_out});

    PADDLE_ENFORCE_GT(subgraph.count(conv_input), 0UL,
                      "subgraph has to contain conv_input node.");
    IR_NODE_LINK_TO(conv, activation_out);
    found_conv_activation_count++;
  };

  gpd(graph, handler);

  AddStatis(found_conv_activation_count);
}

}  // namespace ir
}  // namespace framework
}  // namespace paddle

REGISTER_PASS(conv_activation_mkldnn_fuse_pass,
              paddle::framework::ir::ConvActivationFusePass);

REGISTER_PASS(conv_relu_mkldnn_fuse_pass,
              paddle::framework::ir::ConvActivationFusePass);

REGISTER_PASS(conv_leaky_relu_mkldnn_fuse_pass,
              paddle::framework::ir::Conv2DLeakyReLUFusePass);

REGISTER_PASS(conv_relu6_mkldnn_fuse_pass,
              paddle::framework::ir::Conv2DReLU6FusePass);
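
// The tester resolves each variant by name via
// PassRegistry::Instance().Get("conv_" + activation + "_mkldnn_fuse_pass"),
// a naming convention the activation-specific registrations above satisfy.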
@@ -0,0 +1,55 @@
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <string>
#include "paddle/fluid/framework/ir/fuse_pass_base.h"
#include "paddle/fluid/framework/ir/graph.h"
#include "paddle/fluid/framework/ir/graph_pattern_detector.h"
#include "paddle/fluid/framework/ir/pass.h"

namespace paddle {
namespace framework {
namespace ir {
/*
 * Fuse Conv and Activation base class.
 */
class ConvActivationFusePass : public FusePassBase {
 public:
  virtual ~ConvActivationFusePass() {}
  virtual std::string conv_type() const { return "conv2d"; }
  virtual std::string activation_type() const { return "relu"; }

 protected:
  void ApplyImpl(ir::Graph* graph) const override;
  const std::string name_scope_{"conv_activation_mkldnn_fuse"};
};
/*
 * Fuse Conv and LeakyReLU class
 */
class Conv2DLeakyReLUFusePass : public ConvActivationFusePass {
 public:
  std::string activation_type() const { return "leaky_relu"; }
};
/*
 * Fuse Conv and BoundedReLU class
 */
class Conv2DReLU6FusePass : public ConvActivationFusePass {
 public:
  std::string activation_type() const { return "relu6"; }
};
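/*
 * Hypothetical extension (not part of this commit): supporting a further
 * activation would only require overriding activation_type(), e.g.
 *
 *   class Conv2DSwishFusePass : public ConvActivationFusePass {
 *    public:
 *     std::string activation_type() const { return "swish"; }
 *   };
 *
 * plus a matching REGISTER_PASS entry in the .cc file.
 */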
}  // namespace ir
}  // namespace framework
}  // namespace paddle
@@ -0,0 +1,145 @@
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/framework/ir/mkldnn/conv_activation_mkldnn_fuse_pass.h"

#include <gtest/gtest.h>
#include "paddle/fluid/framework/op_proto_maker.h"

namespace paddle {
namespace framework {
namespace ir {

void SetOp(ProgramDesc* prog, const std::string& type, const std::string& name,
           const std::vector<std::string>& inputs,
           const std::vector<std::string>& outputs, bool is_activation = false,
           bool use_mkldnn = false) {
  auto* op = prog->MutableBlock(0)->AppendOp();
  op->SetType(type);
  op->SetAttr("name", name);
  if (type == "conv2d") {
    op->SetAttr("use_mkldnn", use_mkldnn);
    op->SetInput("Input", {inputs[0]});
    op->SetInput("Filter", {inputs[1]});
    op->SetInput("Bias", {inputs[2]});
  } else if (is_activation) {
    op->SetAttr("use_mkldnn", use_mkldnn);
    op->SetInput("X", inputs);
    if (type == "leaky_relu") {
      op->SetAttr("alpha", 0.02f);
    } else if (type == "relu6") {
      op->SetAttr("threshold", 6.0f);
    }
  }
  op->SetOutput("Out", outputs);
  op->SetAttr(OpProtoAndCheckerMaker::OpRoleAttrName(),
              static_cast<int>(OpRole::kForward));
}

// a->OP0->b
// b->OP1->c
// (c, weights, bias)->conv->f
// (f)->activation->g
ProgramDesc BuildProgramDesc(std::string activation) {
  ProgramDesc prog;
  for (auto& v :
       std::vector<std::string>({"a", "b", "c", "weights", "bias", "f", "g",
                                 "h", "weights2", "bias2", "k", "l", "m"})) {
    auto* var = prog.MutableBlock(0)->Var(v);
    var->SetType(proto::VarType::SELECTED_ROWS);
    if (v == "weights" || v == "bias" || v == "weights2" || v == "bias2") {
      var->SetPersistable(true);
    }
  }

  SetOp(&prog, "OP0", "op0", std::vector<std::string>({"a"}),
        std::vector<std::string>({"b"}));
  SetOp(&prog, "OP1", "op1", std::vector<std::string>({"b"}),
        std::vector<std::string>({"c"}));
  // conv+activation, both with MKL-DNN
  SetOp(&prog, "conv2d", "conv1",
        std::vector<std::string>({"c", "weights", "bias"}),
        std::vector<std::string>({"f"}), false, true);
  SetOp(&prog, activation, "activation1", std::vector<std::string>({"f"}),
        std::vector<std::string>({"g"}), true, true);
  SetOp(&prog, "OP3", "op3", std::vector<std::string>({"g"}),
        std::vector<std::string>({"h"}));
  // conv+activation, only one with MKL-DNN
  SetOp(&prog, "conv2d", "conv2",
        std::vector<std::string>({"h", "weights2", "bias2"}),
        std::vector<std::string>({"k"}), false, true);
  SetOp(&prog, "activation", "activation2", std::vector<std::string>({"k"}),
        std::vector<std::string>({"l"}), true, false);
  SetOp(&prog, "OP4", "op4", std::vector<std::string>({"l"}),
        std::vector<std::string>({"m"}));

  return prog;
}

void MainTest(std::string activation) {
  auto prog = BuildProgramDesc(activation);

  std::unique_ptr<ir::Graph> graph(new ir::Graph(prog));

  auto pass =
      PassRegistry::Instance().Get("conv_" + activation + "_mkldnn_fuse_pass");

  int original_nodes_num = graph->Nodes().size();

  graph.reset(pass->Apply(graph.release()));

  int current_nodes_num = graph->Nodes().size();

  // Remove 3 Nodes: CONV, activation, conv_out
  // Add 1 Node: ConvActivation
  EXPECT_EQ(original_nodes_num - 2, current_nodes_num);
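  // Only the activation op and the intermediate conv_out variable are
  // removed (GraphSafeRemoveNodes in the pass); the conv op is rewritten in
  // place rather than replaced, hence the net difference of 2 nodes.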

  // Assert conv_activation op in newly generated graph
  int conv_activation_count = 0;

  for (auto* node : graph->Nodes()) {
    if (node->IsOp() && node->Op()->Type() == "conv2d") {
      auto* op = node->Op();
      ASSERT_TRUE(op->HasAttr("use_mkldnn"));
      EXPECT_TRUE(boost::get<bool>(op->GetAttr("use_mkldnn")));
      auto op_name = boost::get<std::string>(op->GetAttr("name"));
      std::string fuse_activation =
          op->HasAttr("fuse_activation")
              ? boost::get<std::string>(op->GetAttr("fuse_activation"))
              : "";
      if (fuse_activation == activation) {
        ++conv_activation_count;
      }
      // check if only "conv1" convolution is fused
      if (op_name == "conv1") {
        ASSERT_TRUE(op->HasAttr("fuse_activation"));
      } else if (op_name == "conv2") {
        ASSERT_FALSE(op->HasAttr("fuse_activation"));
      }
    }
  }
  EXPECT_EQ(conv_activation_count, 1);
}

TEST(ConvActivationFusePass, conv_relu_fuse_pass) { MainTest("relu"); }
TEST(ConvActivationFusePass, conv_leaky_relu_fuse_pass) {
  MainTest("leaky_relu");
}
TEST(ConvActivationFusePass, conv_relu6_fuse_pass) { MainTest("relu6"); }

}  // namespace ir
}  // namespace framework
}  // namespace paddle

USE_PASS(conv_activation_mkldnn_fuse_pass);
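// USE_PASS references the pass's registration symbol so the statically
// registered pass is linked into the test binary; the conv_relu,
// conv_leaky_relu, and conv_relu6 variants are registered in the same
// translation unit and are pulled in with it.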