@@ -313,6 +313,47 @@ TEST(MKLDNNLayer, AddtoLayer) {
  testAddtoLayer({4, 12, 1, 1}, 3);
}
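
// Build a TestConfig for an "mkldnn_concat" layer: every input must share
// the same batch size and spatial dimensions, and the output channel count
// is the sum of the input channels (concatenation along the channel axis).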
static void getMKLDNNConcatConfig(TestConfig& cfg,
                                  const std::vector<testImageDesc>& inputs) {
  CHECK_GE(inputs.size(), 2UL) << "at least two inputs";
  int oc = inputs[0].ic;
  for (size_t i = 1; i < inputs.size(); ++i) {
    CHECK_EQ(inputs[i].bs, inputs[0].bs);
    CHECK_EQ(inputs[i].ih, inputs[0].ih);
    CHECK_EQ(inputs[i].iw, inputs[0].iw);
    oc += inputs[i].ic;
  }
  cfg.biasSize = 0;
  cfg.layerConfig.set_type("mkldnn_concat");
  cfg.layerConfig.set_size(oc * inputs[0].ih * inputs[0].iw);
  cfg.layerConfig.set_active_type("relu");
  for (size_t i = 0; i < inputs.size(); ++i) {
    std::stringstream ss;
    ss << "layer_" << i;
    cfg.inputDefs.push_back(
        {INPUT_DATA,
         ss.str(),
         (size_t)(inputs[i].ic) * inputs[i].ih * inputs[i].iw,
         0});
    LayerInputConfig* input = cfg.layerConfig.add_inputs();
    ImageConfig* img_conf = input->mutable_image_conf();
    img_conf->set_channels(inputs[i].ic);
    img_conf->set_img_size_y(inputs[i].ih);
    img_conf->set_img_size(inputs[i].iw);
  }
}
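
// Compare the mkldnn_concat layer against the reference "concat" layer;
// inputs[0] supplies the image descriptor the test macro expects.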
void testConcatLayer(const std::vector<testImageDesc>& inputs) {
  TestConfig dnnConfig;
  getMKLDNNConcatConfig(dnnConfig, inputs);
  RUN_MKLDNN_TEST_LAYER(dnnConfig, "concat", inputs[0])
}
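
// Each descriptor is {bs, ic, ih, iw}; the cases cover 1x1 and 8x8 feature
// maps with differing channel counts per input.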
TEST(MKLDNNLayer, ConcatLayer) {
  testConcatLayer({{64, 128, 1, 1}, {64, 32, 1, 1}, {64, 64, 1, 1}});
  testConcatLayer({{32, 100, 8, 8}, {32, 10, 8, 8}});
}

void testActivation(std::string actType, const testImageDesc& pm) {
  // TODO(TJ): remove me when paddle supports elu activation
  if (actType == "mkldnn_elu") {