cherry-pick feature/anakin-engine: add anakin softmax/transpose/batch_norm/flatten/reshape op (#16020)
* add anakin softmax/flatten/reshape/transpose/batch_norm op converters
parent b21770a2aa
commit 0945b97f07
paddle/fluid/inference/anakin/convert/CMakeLists.txt
@@ -1,12 +1,16 @@
cc_library(anakin_op_converter SRCS fc.cc conv2d.cc conv2d_fusion.cc
elementwise.cc activation.cc pool2d.cc concat.cc split.cc relu.cc DEPS anakin_engine framework_proto scope op_registry)
elementwise.cc activation.cc pool2d.cc concat.cc split.cc relu.cc softmax.cc batch_norm.cc reshape.cc flatten.cc transpose.cc DEPS anakin_engine framework_proto scope op_registry)
cc_test(test_anakin_fc SRCS test_fc_op.cc DEPS anakin_op_converter mul_op)
cc_test(test_anakin_conv2d SRCS test_conv2d_op.cc DEPS anakin_op_converter conv_op im2col vol2col depthwise_conv SERIAL)
cc_test(test_anakin_activation SRCS test_activation_op.cc DEPS activation_op anakin_op_converter SERIAL)
cc_test(test_anakin_conv2d SRCS test_conv2d_op.cc DEPS anakin_op_converter conv_op im2col vol2col depthwise_conv)
cc_test(test_anakin_activation SRCS test_activation_op.cc DEPS activation_op anakin_op_converter)
cc_test(test_anakin_pool2d SRCS test_pool2d_op.cc DEPS anakin_op_converter pool_op pooling)
cc_test(test_anakin_concat SRCS test_concat_op.cc DEPS anakin_op_converter concat_op concat_and_split)
cc_test(test_anakin_split SRCS test_split_op.cc DEPS anakin_op_converter split_op concat_and_split)
cc_test(test_anakin_elementwise SRCS test_elementwise_op.cc DEPS
                                anakin_op_converter elementwise_add_op)

cc_test(test_anakin_relu SRCS test_relu_op.cc DEPS activation_op anakin_op_converter SERIAL)
cc_test(test_anakin_softmax SRCS test_softmax_op.cc DEPS anakin_op_converter softmax_op softmax)
cc_test(test_anakin_reshape SRCS test_reshape_op.cc DEPS anakin_op_converter reshape_op)
cc_test(test_anakin_flatten SRCS test_flatten_op.cc DEPS anakin_op_converter flatten_op reshape_op)
cc_test(test_anakin_transpose SRCS test_transpose_op.cc DEPS anakin_op_converter transpose_op)
cc_test(test_anakin_batch_norm SRCS test_batch_norm_op.cc DEPS anakin_op_converter batch_norm_op)
paddle/fluid/inference/anakin/convert/batch_norm.cc
@@ -0,0 +1,122 @@
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/inference/anakin/convert/batch_norm.h"
#include <math.h>
#include <map>
#include <string>
#include <vector>

using anakin::graph::GraphGlobalMem;
using anakin::AK_FLOAT;
using anakin::saber::NV;
using anakin::saber::Shape;

namespace paddle {
namespace inference {
namespace anakin {

void BatchNormOpConverter::operator()(const framework::proto::OpDesc &op,
                                      const framework::Scope &scope,
                                      bool test_mode) {
  framework::OpDesc op_desc(op, nullptr);
  PADDLE_ENFORCE_EQ(op_desc.Output("Y").size(), 1);
  std::map<std::string, std::string> inputs;
  for (auto k : {"X", "Scale", "Bias", "Mean", "Variance"}) {
    PADDLE_ENFORCE_EQ(op_desc.Input(k).size(), 1UL);
    auto v = op_desc.Input(k).front();
    inputs.insert({k, v});
  }

  auto output = op_desc.Output("Y").front();
  auto op_name = op_desc.Type() + ":" + op_desc.Output("Y").front();
  engine_->AddOp(op_name, "Scale", {inputs["X"]}, {output});
  engine_->AddOpAttr(op_name, "bias_term", true);
  engine_->AddOpAttr(op_name, "axis", 1);
  engine_->AddOpAttr(op_name, "num_axes", 1);

  bool is_test = boost::get<bool>(op_desc.GetAttr("is_test"));
  PADDLE_ENFORCE(is_test);
  float epsilon = boost::get<float>(op_desc.GetAttr("epsilon"));
  engine_->AddOpAttr(op_name, "epsilon", epsilon);

  auto get_lod_tensor = [this, &scope, &op_name](const std::string &var_name,
                                                 framework::LoDTensor *tensor) {
    auto *v = scope.FindVar(var_name);
    PADDLE_ENFORCE_NOT_NULL(v);
    auto *t = v->GetMutable<framework::LoDTensor>();
    tensor->Resize(t->dims());
    TensorCopySync(*t, platform::CPUPlace(), tensor);
  };

  framework::LoDTensor bias_t;
  framework::LoDTensor mean_t;
  framework::LoDTensor scale_t;
  framework::LoDTensor variance_t;
  get_lod_tensor(inputs["Bias"], &bias_t);
  get_lod_tensor(inputs["Mean"], &mean_t);
  get_lod_tensor(inputs["Scale"], &scale_t);
  get_lod_tensor(inputs["Variance"], &variance_t);

  auto *bias = bias_t.mutable_data<float>(platform::CPUPlace());
  auto *mean = mean_t.mutable_data<float>(platform::CPUPlace());
  auto *scale = scale_t.mutable_data<float>(platform::CPUPlace());
  auto *variance = variance_t.mutable_data<float>(platform::CPUPlace());

  framework::LoDTensor combile_scale_t;
  framework::LoDTensor combile_bias_t;
  combile_scale_t.Resize(scale_t.dims());
  combile_bias_t.Resize(bias_t.dims());

  auto *combile_scale =
      combile_scale_t.mutable_data<float>(platform::CPUPlace());
  auto *combile_bias = combile_bias_t.mutable_data<float>(platform::CPUPlace());

  size_t elem_num = combile_scale_t.memory_size() / sizeof(float);
  for (size_t i = 0; i < elem_num; i++) {
    combile_scale[i] = scale[i] / sqrtf(variance[i] + epsilon);
    combile_bias[i] = bias[i] - mean[i] * combile_scale[i];
  }

  auto fill_shape = [](size_t n, std::vector<int> *shape) {
    shape->insert(shape->begin(), 1);
    if (shape->size() < n) {
      shape->insert(shape->end(), n - shape->size(), 1);
    }
  };
  auto scale_shape = framework::vectorize2int(combile_scale_t.dims());
  auto bias_shape = framework::vectorize2int(combile_bias_t.dims());
  fill_shape(4, &scale_shape);
  fill_shape(4, &bias_shape);
  Shape weight1_shape(scale_shape);
  Shape weight2_shape(bias_shape);
  auto *weight1 =
      GraphGlobalMem<NV>::Global().template new_block<AK_FLOAT>(weight1_shape);
  auto *scale_data = static_cast<float *>(weight1->h_tensor().mutable_data());
  std::copy_n(combile_scale_t.data<float>(), combile_scale_t.numel(),
              scale_data);
  engine_->AddOpAttr(op_name, "weight_1", *weight1);

  auto *weight2 =
      GraphGlobalMem<NV>::Global().template new_block<AK_FLOAT>(weight2_shape);
  auto *bias_data = static_cast<float *>(weight2->h_tensor().mutable_data());
  std::copy_n(combile_bias_t.data<float>(), combile_bias_t.numel(), bias_data);
  engine_->AddOpAttr(op_name, "weight_2", *weight2);
}

}  // namespace anakin
}  // namespace inference
}  // namespace paddle

REGISTER_ANAKIN_OP_CONVERTER(batch_norm, BatchNormOpConverter);
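For reference, the conversion above folds the batch-norm statistics into the per-channel affine pair expected by Anakin's Scale op: y = gamma * (x - mean) / sqrt(var + eps) + beta is rewritten as y = scale' * x + bias'. A minimal standalone sketch of that arithmetic (illustrative only, not part of the patch; the helper name FoldBatchNorm is hypothetical):

#include <cmath>
#include <vector>

// Fold batch-norm statistics into (scale', bias'), mirroring the
// combile_scale / combile_bias loop in batch_norm.cc above.
void FoldBatchNorm(const std::vector<float> &gamma, const std::vector<float> &beta,
                   const std::vector<float> &mean, const std::vector<float> &var,
                   float eps, std::vector<float> *scale, std::vector<float> *bias) {
  scale->resize(gamma.size());
  bias->resize(gamma.size());
  for (size_t i = 0; i < gamma.size(); ++i) {
    (*scale)[i] = gamma[i] / std::sqrt(var[i] + eps);  // combile_scale[i]
    (*bias)[i] = beta[i] - mean[i] * (*scale)[i];      // combile_bias[i]
  }
}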
paddle/fluid/inference/anakin/convert/batch_norm.h
@@ -0,0 +1,35 @@
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include "paddle/fluid/inference/anakin/convert/op_converter.h"

namespace paddle {
namespace inference {
namespace anakin {

class BatchNormOpConverter : public AnakinOpConverter {
 public:
  BatchNormOpConverter() = default;

  virtual void operator()(const framework::proto::OpDesc &op,
                          const framework::Scope &scope,
                          bool test_mode) override;
  virtual ~BatchNormOpConverter() {}
};

}  // namespace anakin
}  // namespace inference
}  // namespace paddle
paddle/fluid/inference/anakin/convert/flatten.cc
@@ -0,0 +1,60 @@
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/inference/anakin/convert/flatten.h"
#include <vector>

using anakin::graph::GraphGlobalMem;
using anakin::AK_FLOAT;
using anakin::saber::NV;
using anakin::saber::Shape;
using anakin::PTuple;

namespace paddle {
namespace inference {
namespace anakin {

void FlattenOpConverter::operator()(const framework::proto::OpDesc &op,
                                    const framework::Scope &scope,
                                    bool test_mode) {
  framework::OpDesc op_desc(op, nullptr);
  PADDLE_ENFORCE_EQ(op_desc.Input("X").size(), 1UL);
  PADDLE_ENFORCE_EQ(op_desc.Output("Out").size(), 1UL);

  auto input = op_desc.Input("X").front();
  auto output = op_desc.Output("Out").front();
  auto in_dims = scope.FindVar(input)->Get<framework::LoDTensor>().dims();
  int axis = boost::get<int>(op_desc.GetAttr("axis"));

  int inner = 1;
  int outer = 1;
  for (int i = 0; i < in_dims.size(); i++) {
    if (i < axis) {
      outer *= in_dims[i];
    } else {
      inner *= in_dims[i];
    }
  }

  std::vector<int> out_dims = {1, outer, inner, 1};
  auto op_name = op_desc.Type() + ":" + op_desc.Output("Out").front();
  engine_->AddOp(op_name, "Reshape", {input}, {output});
  engine_->AddOpAttr<PTuple<int>>(op_name, "dims", out_dims);
}

}  // namespace anakin
}  // namespace inference
}  // namespace paddle

REGISTER_ANAKIN_OP_CONVERTER(flatten, FlattenOpConverter);
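The flatten converter is emitted as a 4-D Reshape: dimensions before axis are merged into outer, the rest into inner, and the target dims become {1, outer, inner, 1}. A small standalone sketch of that computation (illustrative only; FlattenDims is a hypothetical helper), e.g. in_dims = {3, 100, 100, 4} with axis = 2 yields {1, 300, 400, 1}, which matches the expectations in test_flatten_op.cc below:

#include <vector>

// Mirror of the inner/outer loop in flatten.cc.
std::vector<int> FlattenDims(const std::vector<int> &in_dims, int axis) {
  int outer = 1;
  int inner = 1;
  for (int i = 0; i < static_cast<int>(in_dims.size()); ++i) {
    if (i < axis) {
      outer *= in_dims[i];
    } else {
      inner *= in_dims[i];
    }
  }
  return {1, outer, inner, 1};
}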
paddle/fluid/inference/anakin/convert/flatten.h
@@ -0,0 +1,35 @@
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include "paddle/fluid/inference/anakin/convert/op_converter.h"

namespace paddle {
namespace inference {
namespace anakin {

class FlattenOpConverter : public AnakinOpConverter {
 public:
  FlattenOpConverter() = default;

  virtual void operator()(const framework::proto::OpDesc &op,
                          const framework::Scope &scope,
                          bool test_mode) override;
  virtual ~FlattenOpConverter() {}
};

}  // namespace anakin
}  // namespace inference
}  // namespace paddle
paddle/fluid/inference/anakin/convert/reshape.cc
@@ -0,0 +1,52 @@
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/inference/anakin/convert/reshape.h"
#include <vector>

using anakin::graph::GraphGlobalMem;
using anakin::AK_FLOAT;
using anakin::saber::NV;
using anakin::saber::Shape;
using anakin::PTuple;

namespace paddle {
namespace inference {
namespace anakin {

void ReshapeOpConverter::operator()(const framework::proto::OpDesc &op,
                                    const framework::Scope &scope,
                                    bool test_mode) {
  framework::OpDesc op_desc(op, nullptr);
  PADDLE_ENFORCE_EQ(op_desc.Input("X").size(), 1UL);
  PADDLE_ENFORCE_EQ(op_desc.Output("Out").size(), 1UL);

  auto input = op_desc.Input("X").front();
  auto output = op_desc.Output("Out").front();

  auto op_name = op_desc.Type() + ":" + op_desc.Output("Out").front();
  engine_->AddOp(op_name, "Reshape", {input}, {output});

  auto shape = boost::get<std::vector<int>>(op_desc.GetAttr("shape"));
  if (shape.size() < 4) {
    shape.insert(shape.end(), 4 - shape.size(), 1);
  }
  engine_->AddOpAttr<PTuple<int>>(op_name, "dims", shape);
}

}  // namespace anakin
}  // namespace inference
}  // namespace paddle

REGISTER_ANAKIN_OP_CONVERTER(reshape, ReshapeOpConverter);
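Since Anakin works on rank-4 shapes, the converter pads the Paddle "shape" attribute with trailing 1s, e.g. {1, 8} becomes {1, 8, 1, 1}. A standalone sketch of that padding rule (illustrative only; PadShapeTo4D is a hypothetical helper):

#include <vector>

// Pad a reshape target to rank 4 by appending 1s, as reshape.cc does.
std::vector<int> PadShapeTo4D(std::vector<int> shape) {
  if (shape.size() < 4) {
    shape.insert(shape.end(), 4 - shape.size(), 1);
  }
  return shape;
}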
paddle/fluid/inference/anakin/convert/reshape.h
@@ -0,0 +1,35 @@
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include "paddle/fluid/inference/anakin/convert/op_converter.h"

namespace paddle {
namespace inference {
namespace anakin {

class ReshapeOpConverter : public AnakinOpConverter {
 public:
  ReshapeOpConverter() = default;

  virtual void operator()(const framework::proto::OpDesc &op,
                          const framework::Scope &scope,
                          bool test_mode) override;
  virtual ~ReshapeOpConverter() {}
};

}  // namespace anakin
}  // namespace inference
}  // namespace paddle
paddle/fluid/inference/anakin/convert/softmax.cc
@@ -0,0 +1,43 @@
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/inference/anakin/convert/softmax.h"

using anakin::graph::GraphGlobalMem;
using anakin::AK_FLOAT;
using anakin::saber::NV;
using anakin::saber::Shape;

namespace paddle {
namespace inference {
namespace anakin {

void SoftMaxOpConverter::operator()(const framework::proto::OpDesc &op,
                                    const framework::Scope &scope,
                                    bool test_mode) {
  framework::OpDesc op_desc(op, nullptr);
  PADDLE_ENFORCE_EQ(op_desc.Input("X").size(), 1UL);

  auto input = op_desc.Input("X").front();
  auto output = op_desc.Output("Out").front();
  auto op_name = op_desc.Type() + ":" + op_desc.Output("Out").front();
  engine_->AddOp(op_name, "Softmax", {input}, {output});
  engine_->AddOpAttr(op_name, "axis", 1);
}

}  // namespace anakin
}  // namespace inference
}  // namespace paddle

REGISTER_ANAKIN_OP_CONVERTER(softmax, SoftMaxOpConverter);
paddle/fluid/inference/anakin/convert/softmax.h
@@ -0,0 +1,35 @@
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include "paddle/fluid/inference/anakin/convert/op_converter.h"

namespace paddle {
namespace inference {
namespace anakin {

class SoftMaxOpConverter : public AnakinOpConverter {
 public:
  SoftMaxOpConverter() = default;

  virtual void operator()(const framework::proto::OpDesc &op,
                          const framework::Scope &scope,
                          bool test_mode) override;
  virtual ~SoftMaxOpConverter() {}
};

}  // namespace anakin
}  // namespace inference
}  // namespace paddle
paddle/fluid/inference/anakin/convert/test_batch_norm_op.cc
@@ -0,0 +1,71 @@
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <gtest/gtest.h>
#include "paddle/fluid/inference/anakin/convert/ut_helper.h"

namespace paddle {
namespace inference {
namespace anakin {

TEST(batch_norm_op, test) {
  std::unordered_set<std::string> parameters(
      {"batch_norm_scale", "batch_norm_bias", "batch_norm_mean",
       "batch_norm_variance"});
  framework::Scope scope;
  AnakinConvertValidation validator(parameters, scope);
  std::vector<int> param_shape{2};

  validator.DeclInputVar("batch_norm_X", {1, 2, 5, 5});
  validator.DeclParamVar("batch_norm_scale", param_shape);
  validator.DeclParamVar("batch_norm_bias", param_shape);
  validator.DeclParamVar("batch_norm_mean", param_shape);
  validator.DeclParamVar("batch_norm_variance", param_shape);
  validator.DeclOutputVar("batch_norm_Y", {1, 2, 5, 5});
  validator.DeclOutputVar("batch_norm_save_mean", param_shape);
  validator.DeclOutputVar("batch_norm_save_variance", param_shape);

  // Prepare Op description
  framework::OpDesc desc;

  desc.SetType("batch_norm");
  desc.SetInput("X", {"batch_norm_X"});
  desc.SetInput("Scale", {"batch_norm_scale"});
  desc.SetInput("Bias", {"batch_norm_bias"});
  desc.SetInput("Mean", {"batch_norm_mean"});
  desc.SetInput("Variance", {"batch_norm_variance"});
  desc.SetOutput("Y", {"batch_norm_Y"});
  desc.SetOutput("MeanOut", {"batch_norm_mean"});
  desc.SetOutput("VarianceOut", {"batch_norm_variance"});
  desc.SetOutput("SavedMean", {"batch_norm_save_mean"});
  desc.SetOutput("SavedVariance", {"batch_norm_save_variance"});

  float eps = 1e-5f;
  desc.SetAttr("epsilon", eps);
  desc.SetAttr("is_test", true);
  // desc.SetAttr("momentum", 0.8f);

  validator.SetOp(*desc.Proto());

  std::unordered_set<std::string> neglected_output = {
      "batch_norm_save_mean", "batch_norm_save_variance", "batch_norm_mean",
      "batch_norm_variance"};
  validator.Execute(1, neglected_output);
}

}  // namespace anakin
}  // namespace inference
}  // namespace paddle
USE_OP(batch_norm);
USE_ANAKIN_CONVERTER(batch_norm);
paddle/fluid/inference/anakin/convert/test_flatten_op.cc
@@ -0,0 +1,51 @@
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <gtest/gtest.h>
#include "paddle/fluid/inference/anakin/convert/op_converter.h"
#include "paddle/fluid/inference/anakin/convert/ut_helper.h"

namespace paddle {
namespace inference {
namespace anakin {

TEST(flatten_op, test) {
  auto *converter = Registry<AnakinOpConverter>::Global().Lookup("flatten");
  ASSERT_TRUE(converter);

  std::unordered_set<std::string> parameters;
  framework::Scope scope;
  AnakinConvertValidation validator(parameters, scope);
  validator.DeclInputVar("flatten-X", {3, 100, 100, 4});
  validator.DeclOutputVar("flatten-Out", {1, 300, 400, 1});
  framework::OpDesc desc;
  desc.SetType("flatten");
  desc.SetInput("X", {"flatten-X"});
  desc.SetOutput("Out", {"flatten-Out"});
  desc.SetAttr("axis", 2);

  LOG(INFO) << "set OP";
  validator.SetOp(*desc.Proto());
  LOG(INFO) << "execute";

  validator.Execute(5);
}

}  // namespace anakin
}  // namespace inference
}  // namespace paddle

USE_OP(reshape);
USE_OP_ITSELF(flatten);
USE_ANAKIN_CONVERTER(flatten);
paddle/fluid/inference/anakin/convert/test_reshape_op.cc
@@ -0,0 +1,53 @@
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <gtest/gtest.h>
#include "paddle/fluid/inference/anakin/convert/op_converter.h"
#include "paddle/fluid/inference/anakin/convert/ut_helper.h"

namespace paddle {
namespace inference {
namespace anakin {

TEST(reshape, test) {
  auto* converter = Registry<AnakinOpConverter>::Global().Lookup("reshape");
  ASSERT_TRUE(converter);
  framework::Scope scope;
  std::unordered_set<std::string> parameters;
  AnakinConvertValidation validator(parameters, scope);

  // validator.DeclInputVar("reshape-X", {2, 3, 3, 1});
  // validator.DeclOutputVar("reshape-Out", {3, 2, 1, 3});
  validator.DeclInputVar("reshape-X", {1, 2, 4, 1});
  validator.DeclOutputVar("reshape-Out", {1, 8, 1, 1});

  framework::OpDesc desc;
  desc.SetType("reshape");
  desc.SetInput("X", {"reshape-X"});
  desc.SetOutput("Out", {"reshape-Out"});
  // desc.SetAttr("shape", std::vector<int>({3, 2, 1, 3}));
  desc.SetAttr("shape", std::vector<int>({1, 8, 1, 1}));

  LOG(INFO) << "set OP";
  validator.SetOp(*desc.Proto());
  LOG(INFO) << "execute";
  validator.Execute(1);
}

}  // namespace anakin
}  // namespace inference
}  // namespace paddle

USE_OP(reshape);
USE_ANAKIN_CONVERTER(reshape);
paddle/fluid/inference/anakin/convert/test_softmax_op.cc
@@ -0,0 +1,50 @@
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <gtest/gtest.h>
#include "paddle/fluid/inference/anakin/convert/op_converter.h"
#include "paddle/fluid/inference/anakin/convert/ut_helper.h"

namespace paddle {
namespace inference {
namespace anakin {

TEST(softmax, test) {
  auto* converter = Registry<AnakinOpConverter>::Global().Lookup("softmax");
  ASSERT_TRUE(converter);
  framework::Scope scope;
  std::unordered_set<std::string> parameters;
  AnakinConvertValidation validator(parameters, scope);

  std::vector<int> tensor_shape{8, 10};
  validator.DeclInputVar("softmax-X", {1, 10, 1, 1});
  validator.DeclOutputVar("softmax-Out", {1, 10, 1, 1});

  framework::OpDesc desc;
  desc.SetType("softmax");
  desc.SetInput("X", {"softmax-X"});
  desc.SetOutput("Out", {"softmax-Out"});

  LOG(INFO) << "set OP";
  validator.SetOp(*desc.Proto());
  LOG(INFO) << "execute";
  validator.Execute(1);
}

}  // namespace anakin
}  // namespace inference
}  // namespace paddle

USE_OP(softmax);
USE_ANAKIN_CONVERTER(softmax);
paddle/fluid/inference/anakin/convert/test_transpose_op.cc
@@ -0,0 +1,51 @@
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <gtest/gtest.h>
#include "paddle/fluid/inference/anakin/convert/op_converter.h"
#include "paddle/fluid/inference/anakin/convert/ut_helper.h"

namespace paddle {
namespace inference {
namespace anakin {

TEST(transpose_op, test) {
  auto* converter = Registry<AnakinOpConverter>::Global().Lookup("transpose");
  ASSERT_TRUE(converter != nullptr);
  std::unordered_set<std::string> parameters;
  framework::Scope scope;
  AnakinConvertValidation validator(parameters, scope);
  validator.DeclInputVar("transpose-X", {2, 3, 4, 5});
  validator.DeclOutputVar("transpose-Out", {4, 2, 5, 3});

  // Prepare Op description
  framework::OpDesc desc;
  desc.SetType("transpose");
  desc.SetInput("X", {"transpose-X"});
  desc.SetOutput("Out", {"transpose-Out"});
  desc.SetAttr("axis", std::vector<int>({2, 0, 3, 1}));

  LOG(INFO) << "set OP";
  validator.SetOp(*desc.Proto());
  LOG(INFO) << "execute";

  validator.Execute(3);
}

}  // namespace anakin
}  // namespace inference
}  // namespace paddle

USE_OP(transpose);
USE_ANAKIN_CONVERTER(transpose);
paddle/fluid/inference/anakin/convert/transpose.cc
@@ -0,0 +1,50 @@
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "paddle/fluid/inference/anakin/convert/transpose.h"
#include <algorithm>
#include <string>
#include <vector>

using anakin::graph::GraphGlobalMem;
using anakin::AK_FLOAT;
using anakin::saber::NV;
using anakin::saber::Shape;
using anakin::PTuple;

namespace paddle {
namespace inference {
namespace anakin {

void TransposeOpConverter::operator()(const framework::proto::OpDesc &op,
                                      const framework::Scope &scope,
                                      bool test_mode) {
  framework::OpDesc op_desc(op, nullptr);
  PADDLE_ENFORCE_EQ(op_desc.Input("X").size(), 1);
  PADDLE_ENFORCE_EQ(op_desc.Output("Out").size(), 1);

  auto input = op_desc.Input("X").front();
  auto output = op_desc.Output("Out").front();
  auto op_name = op_desc.Type() + ":" + op_desc.Output("Out").front();
  engine_->AddOp(op_name, "Permute", {input}, {output});

  auto axis = boost::get<std::vector<int>>(op_desc.GetAttr("axis"));
  engine_->AddOpAttr<PTuple<int>>(op_name, "dims", axis);
}

}  // namespace anakin
}  // namespace inference
}  // namespace paddle

REGISTER_ANAKIN_OP_CONVERTER(transpose, TransposeOpConverter);
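The transpose op maps directly to Anakin's Permute, with the Paddle "axis" attribute passed through as the permutation. A standalone sketch of applying such a permutation to a shape (illustrative only; PermuteShape is a hypothetical helper): with input shape {2, 3, 4, 5} and axis {2, 0, 3, 1} the output shape is {4, 2, 5, 3}, which is what test_transpose_op.cc above declares.

#include <cstddef>
#include <vector>

// Apply a permutation to a shape: out[i] = in[axis[i]], matching Permute's "dims".
std::vector<int> PermuteShape(const std::vector<int> &in,
                              const std::vector<int> &axis) {
  std::vector<int> out(axis.size());
  for (std::size_t i = 0; i < axis.size(); ++i) {
    out[i] = in[axis[i]];
  }
  return out;
}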
paddle/fluid/inference/anakin/convert/transpose.h
@@ -0,0 +1,35 @@
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include "paddle/fluid/inference/anakin/convert/op_converter.h"

namespace paddle {
namespace inference {
namespace anakin {

class TransposeOpConverter : public AnakinOpConverter {
 public:
  TransposeOpConverter() = default;

  virtual void operator()(const framework::proto::OpDesc &op,
                          const framework::Scope &scope,
                          bool test_mode) override;
  virtual ~TransposeOpConverter() {}
};

}  // namespace anakin
}  // namespace inference
}  // namespace paddle