Implement a common Python unittest to test the IR passes. (#22209)

* Implement a common Python unittest to test the IR passes.
test=develop

* Save the results as np.array and support running the startup program on CPU.
test=develop

* Fix the unittest.
test=develop

* Add check_program to check whether the optimized program differs from the original one.
test=develop

* Remove the interface all_ops.
test=develop

* Add an exception test in pass_test.
test=develop
Yiqun Liu 5 years ago committed by GitHub
parent 99f5907e02
commit b7cac50b64

@@ -165,4 +165,5 @@ int FCFusePass::ApplyFCPattern(Graph* graph, bool with_relu) const {
 }  // namespace framework
 }  // namespace paddle
 
-REGISTER_PASS(fc_fuse_pass, paddle::framework::ir::FCFusePass);
+REGISTER_PASS(fc_fuse_pass, paddle::framework::ir::FCFusePass)
+    .RequirePassAttr("use_gpu");
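Because fc_fuse_pass now declares a required "use_gpu" attribute, any caller has to set it before applying the pass. A minimal sketch of driving the pass from Python through the bindings added in this PR; the core.get_pass and Pass.apply bindings are assumed here (they are what the new pass_test.py relies on), not introduced by this snippet:

import paddle.fluid as fluid
import paddle.fluid.core as core

# Build a tiny program whose graph the pass can rewrite.
main_program = fluid.Program()
startup_program = fluid.Program()
with fluid.program_guard(main_program, startup_program):
    data = fluid.data(name="data", shape=[32, 128], dtype="float32")
    fc_out = fluid.layers.fc(input=data, size=64, act="relu")

graph = core.Graph(main_program.desc)
fc_fuse_pass = core.get_pass("fc_fuse_pass")
# "use_gpu" is a required pass attribute now; Apply() raises
# EnforceNotMet if it has not been set.
fc_fuse_pass.set("use_gpu", False)
graph = fc_fuse_pass.apply(graph)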

@@ -27,12 +27,15 @@ Graph* Pass::Apply(Graph* graph) const {
   CheckPrevPass();
   PADDLE_ENFORCE(graph, "graph passed to Pass::Apply() cannot be empty.");
   for (const std::string& attr : required_pass_attrs_) {
-    PADDLE_ENFORCE(attrs_.find(attr) != attrs_.end(),
-                   "Required pass atrribute %s not set.", attr);
+    PADDLE_ENFORCE_NE(
+        attrs_.find(attr), attrs_.end(),
+        platform::errors::InvalidArgument(
+            "Required attribute %s for pass < %s > is not set.", attr, Type()));
   }
   for (const std::string& attr : required_graph_attrs_) {
-    PADDLE_ENFORCE(graph->Has(attr), "Required graph atrribute %s not set.",
-                   attr);
+    PADDLE_ENFORCE_EQ(graph->Has(attr), true,
+                      platform::errors::InvalidArgument(
+                          "Required attribute %s for graph is not set.", attr));
   }
   ApplyImpl(graph);
   // TODO(panyx0718): Add more verifications.

@@ -60,10 +60,25 @@ class Pass {
     try {
       return *boost::any_cast<AttrType *>(attrs_.at(attr_name));
     } catch (boost::bad_any_cast &) {
-      PADDLE_THROW(
-          "Invalid attribute type of %s error, expected: %s, actual: %s",
-          attr_name, typeid(AttrType *).name(),
-          attrs_.at(attr_name).type().name());
+      auto TypeToString = [](const std::type_info &info) -> std::string {
+        if (std::type_index(info) == std::type_index(typeid(bool *))) {
+          return "bool";
+        } else if (std::type_index(info) == std::type_index(typeid(int *))) {
+          return "int";
+        } else if (std::type_index(info) ==
+                   std::type_index(typeid(const int *))) {
+          return "const int";
+        } else if (std::type_index(info) ==
+                   std::type_index(typeid(std::string *))) {
+          return "std::string";
+        }
+        return info.name();
+      };
+      PADDLE_THROW(platform::errors::InvalidArgument(
+          "Invalid type for attribute %s, expected: %s, actual: %s", attr_name,
+          TypeToString(typeid(AttrType *)),
+          TypeToString(attrs_.at(attr_name).type())));
     }
   }
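The TypeToString lambda exists because typeid(...).name() returns an implementation-defined, usually mangled, string (GCC renders int * as "Pi", for example). Mapping the handful of common attribute pointer types to readable names lets the mismatch message say "expected: const int, actual: int" instead of two mangled identifiers; any other type still falls back to info.name().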

@@ -63,18 +63,38 @@ TEST(PassTest, TestPassAttrCheck) {
   } catch (paddle::platform::EnforceNotMet& e) {
     exception = std::string(e.what());
   }
-  ASSERT_TRUE(exception.find("test_pass_attr not set") != exception.npos);
+  ASSERT_TRUE(exception.find("Required attribute test_pass_attr for pass < "
+                             "test_pass > is not set") != exception.npos);
 
   int val = 1;
   graph.reset(new Graph(prog));
   pass->SetNotOwned<int>("test_pass_attr", &val);
 
+  for (std::string try_type : {"bool", "const int", "std::string"}) {
+    try {
+      if (try_type == "bool") {
+        pass->Get<bool>("test_pass_attr");
+      } else if (try_type == "const int") {
+        pass->Get<const int>("test_pass_attr");
+      } else if (try_type == "std::string") {
+        pass->Get<std::string>("test_pass_attr");
+      }
+    } catch (paddle::platform::EnforceNotMet& e) {
+      exception = std::string(e.what());
+    }
+    std::string msg = "Invalid type for attribute test_pass_attr, expected: " +
+                      try_type + ", actual: int";
+    ASSERT_TRUE(exception.find(msg) != exception.npos);
+  }
+
   try {
     graph.reset(pass->Apply(graph.release()));
   } catch (paddle::platform::EnforceNotMet& e) {
     exception = std::string(e.what());
   }
-  ASSERT_TRUE(exception.find("test_graph_attr not set") != exception.npos);
+  ASSERT_TRUE(exception.find(
+                  "Required attribute test_graph_attr for graph is not set") !=
+              exception.npos);
+
   graph.reset(new Graph(prog));
   graph->Set<int>("test_graph_attr", new int);

@@ -1597,6 +1597,8 @@ All parameter, weight, gradient are variables in Paddle.
            [](ir::Pass &self, const std::string &name, const std::string &attr) {
              self.Set<std::string>(name, new std::string(attr));
            })
+      .def("set", [](ir::Pass &self, const std::string &name,
+                     bool val) { self.Set<bool>(name, new bool(val)); })
       .def("set", [](ir::Pass &self, const std::string &name,
                      int val) { self.Set<const int>(name, new int(val)); })
       .def("set",

@@ -336,6 +336,8 @@ if (WITH_MKLDNN)
   add_subdirectory(mkldnn)
 endif()
 
+add_subdirectory(ir)
+
 if (WITH_TESTING)
   set_property(TEST test_parallel_executor_mnist PROPERTY ENVIRONMENT GLOG_vmodule=all_reduce_deps_pass=10)
 endif()
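As context: GLOG_vmodule=all_reduce_deps_pass=10 enables VLOG output up to level 10 for that pass's translation unit only while test_parallel_executor_mnist runs, so the pass's debug logging stays visible in CI without making every test verbose.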

@@ -0,0 +1,6 @@
+file(GLOB TEST_IR_PASSES RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}" "test_*.py")
+string(REPLACE ".py" "" TEST_IR_PASSES "${TEST_IR_PASSES}")
+
+foreach(target ${TEST_IR_PASSES})
+    py_test_modules(${target} MODULES ${target})
+endforeach()
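With this glob, any new test_*.py dropped into the ir directory is discovered and registered via py_test_modules at CMake configure time, so adding a pass test does not require touching CMake again.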

File diff suppressed because it is too large.
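The suppressed file is the new common harness, pass_test.py, from which FCFusePassTest below derives. Since its diff is not shown, here is a minimal sketch of what such a base class does, pieced together from the commit messages (np.array results, startup on CPU, check_program) and from how the subclass uses it. Only the names used by FCFusePassTest (check_output_with_place, pass_names, pass_attrs, fused_op_type, num_fused_ops) are grounded in this page; everything else, including _apply_ir_passes and the IrGraph round-trip, is an assumption, not the committed file:

# Hypothetical sketch of the suppressed pass_test.py, NOT the committed file.
import unittest

import numpy as np
import paddle.fluid as fluid
import paddle.fluid.core as core


class PassTest(unittest.TestCase):
    def setUp(self):
        self.main_program = fluid.Program()
        self.startup_program = fluid.Program()
        self.feeds = None
        self.fetch_list = None
        self.pass_names = None
        self.pass_attrs = {}
        self.fused_op_type = None
        self.num_fused_ops = -1

    def _apply_ir_passes(self):
        graph = core.Graph(self.main_program.desc)
        names = self.pass_names
        if not isinstance(names, list):
            names = [names]
        for name in names:
            ir_pass = core.get_pass(name)
            # Set required pass attributes, e.g. fc_fuse_pass's "use_gpu".
            for attr, value in self.pass_attrs.get(name, {}).items():
                ir_pass.set(attr, value)
            graph = ir_pass.apply(graph)
        return graph

    def check_output_with_place(self, place, startup_on_cpu=False, atol=1e-5):
        executor = fluid.Executor(place)
        # Per the commit message, initialization may run on CPU even when the
        # main program runs on GPU; the transfer details are elided here.
        startup_place = fluid.CPUPlace() if startup_on_cpu else place
        fluid.Executor(startup_place).run(self.startup_program)

        # Baseline: run the un-optimized program, fetching np.array results.
        expects = executor.run(self.main_program,
                               feed=self.feeds,
                               fetch_list=self.fetch_list,
                               return_numpy=True)

        optimized_program = fluid.framework.IrGraph(
            self._apply_ir_passes(), for_test=True).to_program()
        # check_program: the optimized program must differ from the origin.
        self.assertNotEqual(str(self.main_program), str(optimized_program))

        # Verify the expected number of fused ops actually appeared.
        if self.num_fused_ops >= 0:
            ops = optimized_program.global_block().ops
            fused = [op for op in ops if op.type == self.fused_op_type]
            self.assertEqual(len(fused), self.num_fused_ops)

        # The optimized program must produce the same numerical results.
        outs = executor.run(optimized_program,
                            feed=self.feeds,
                            fetch_list=self.fetch_list,
                            return_numpy=True)
        for out, expect in zip(outs, expects):
            self.assertTrue(np.allclose(out, expect, atol=atol))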

@@ -0,0 +1,52 @@
+# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+import numpy as np
+
+from pass_test import PassTest
+import paddle.fluid as fluid
+import paddle.fluid.core as core
+
+
+class FCFusePassTest(PassTest):
+    def setUp(self):
+        with fluid.program_guard(self.main_program, self.startup_program):
+            data = fluid.data(
+                name="data", shape=[32, 128], dtype="float32", lod_level=0)
+            tmp_0 = fluid.layers.fc(input=data,
+                                    size=128,
+                                    num_flatten_dims=1,
+                                    act="relu")
+            tmp_1 = fluid.layers.fc(input=tmp_0, size=32, num_flatten_dims=1)
+            tmp_2 = fluid.layers.softmax(input=tmp_1)
+
+        self.feeds = {"data": np.random.random((32, 128)).astype("float32")}
+        self.fetch_list = [tmp_0, tmp_1, tmp_2]
+        self.pass_names = "fc_fuse_pass"
+        self.fused_op_type = "fc"
+        self.num_fused_ops = 2
+
+    def test_check_output(self):
+        use_gpu_set = [False]
+        if core.is_compiled_with_cuda():
+            use_gpu_set.append(True)
+        for use_gpu in use_gpu_set:
+            self.pass_attrs = {"fc_fuse_pass": {"use_gpu": use_gpu}}
+            place = fluid.CUDAPlace(0) if use_gpu else fluid.CPUPlace()
+            self.check_output_with_place(place, startup_on_cpu=True)
+
+
+if __name__ == "__main__":
+    unittest.main()
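Why num_fused_ops is 2: fc_fuse_pass rewrites each mul + elementwise_add pair (and the trailing relu, when with_relu is true, as in ApplyFCPattern above) into a single fc op, so the two fluid.layers.fc layers in setUp should yield exactly two fused fc ops in the optimized program, which is what the harness asserts. The test also runs with both use_gpu=False and use_gpu=True (when CUDA is available) to exercise the new required pass attribute on both placements.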