|
|
|
@ -27,27 +27,15 @@ void assign_cpu_kernel(const data_t* x_data,
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
std::vector<paddle::Tensor> AttrTestForward(
|
|
|
|
|
const paddle::Tensor& x,
|
|
|
|
|
bool bool_attr,
|
|
|
|
|
int int_attr,
|
|
|
|
|
float float_attr,
|
|
|
|
|
int64_t int64_attr,
|
|
|
|
|
std::string str_attr,
|
|
|
|
|
std::vector<int> int_vec_attr,
|
|
|
|
|
std::vector<float> float_vec_attr,
|
|
|
|
|
std::vector<int64_t> int64_vec_attr,
|
|
|
|
|
std::vector<std::string> str_vec_attr) {
|
|
|
|
|
auto out = paddle::Tensor(paddle::PlaceType::kCPU);
|
|
|
|
|
out.reshape(x.shape());
|
|
|
|
|
|
|
|
|
|
PD_DISPATCH_FLOATING_TYPES(
|
|
|
|
|
x.type(), "assign_cpu_kernel", ([&] {
|
|
|
|
|
assign_cpu_kernel<data_t>(
|
|
|
|
|
x.data<data_t>(), out.mutable_data<data_t>(), x.size());
|
|
|
|
|
}));
|
|
|
|
|
|
|
|
|
|
// Check attrs value
|
|
|
|
|
void CheckAllForwardAttrs(const bool& bool_attr,
|
|
|
|
|
const int& int_attr,
|
|
|
|
|
const float& float_attr,
|
|
|
|
|
const int64_t& int64_attr,
|
|
|
|
|
const std::string& str_attr,
|
|
|
|
|
const std::vector<int>& int_vec_attr,
|
|
|
|
|
const std::vector<float>& float_vec_attr,
|
|
|
|
|
const std::vector<int64_t>& int64_vec_attr,
|
|
|
|
|
const std::vector<std::string>& str_vec_attr) {
|
|
|
|
|
if (bool_attr != true) {
|
|
|
|
|
throw std::runtime_error("bool_attr value error.");
|
|
|
|
|
}
|
|
|
|
@ -103,26 +91,11 @@ std::vector<paddle::Tensor> AttrTestForward(
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return {out};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// The attrs of backward op must be the subset of attrs of forward op
|
|
|
|
|
std::vector<paddle::Tensor> AttrTestBackward(
|
|
|
|
|
const paddle::Tensor& grad_out,
|
|
|
|
|
int int_attr,
|
|
|
|
|
std::vector<float> float_vec_attr,
|
|
|
|
|
std::vector<std::string> str_vec_attr) {
|
|
|
|
|
auto grad_x = paddle::Tensor(paddle::PlaceType::kCPU);
|
|
|
|
|
grad_x.reshape(grad_out.shape());
|
|
|
|
|
|
|
|
|
|
PD_DISPATCH_FLOATING_TYPES(grad_out.type(), "assign_cpu_kernel", ([&] {
|
|
|
|
|
assign_cpu_kernel<data_t>(
|
|
|
|
|
grad_out.data<data_t>(),
|
|
|
|
|
grad_x.mutable_data<data_t>(),
|
|
|
|
|
grad_out.size());
|
|
|
|
|
}));
|
|
|
|
|
|
|
|
|
|
void CheckAllBackwardAttrs(const int& int_attr,
|
|
|
|
|
const std::vector<float>& float_vec_attr,
|
|
|
|
|
const std::vector<std::string>& str_vec_attr) {
|
|
|
|
|
if (int_attr != 10) {
|
|
|
|
|
throw std::runtime_error("int_attr value error.");
|
|
|
|
|
}
|
|
|
|
@ -146,6 +119,114 @@ std::vector<paddle::Tensor> AttrTestBackward(
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
std::vector<paddle::Tensor> AttrTestForward(
|
|
|
|
|
const paddle::Tensor& x,
|
|
|
|
|
bool bool_attr,
|
|
|
|
|
int int_attr,
|
|
|
|
|
float float_attr,
|
|
|
|
|
int64_t int64_attr,
|
|
|
|
|
std::string str_attr,
|
|
|
|
|
std::vector<int> int_vec_attr,
|
|
|
|
|
std::vector<float> float_vec_attr,
|
|
|
|
|
std::vector<int64_t> int64_vec_attr,
|
|
|
|
|
std::vector<std::string> str_vec_attr) {
|
|
|
|
|
auto out = paddle::Tensor(paddle::PlaceType::kCPU);
|
|
|
|
|
out.reshape(x.shape());
|
|
|
|
|
|
|
|
|
|
PD_DISPATCH_FLOATING_TYPES(
|
|
|
|
|
x.type(), "assign_cpu_kernel", ([&] {
|
|
|
|
|
assign_cpu_kernel<data_t>(
|
|
|
|
|
x.data<data_t>(), out.mutable_data<data_t>(), x.size());
|
|
|
|
|
}));
|
|
|
|
|
|
|
|
|
|
// Check attrs value
|
|
|
|
|
CheckAllForwardAttrs(bool_attr,
|
|
|
|
|
int_attr,
|
|
|
|
|
float_attr,
|
|
|
|
|
int64_attr,
|
|
|
|
|
str_attr,
|
|
|
|
|
int_vec_attr,
|
|
|
|
|
float_vec_attr,
|
|
|
|
|
int64_vec_attr,
|
|
|
|
|
str_vec_attr);
|
|
|
|
|
|
|
|
|
|
return {out};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// The attrs of backward op must be the subset of attrs of forward op
|
|
|
|
|
std::vector<paddle::Tensor> AttrTestBackward(
|
|
|
|
|
const paddle::Tensor& grad_out,
|
|
|
|
|
int int_attr,
|
|
|
|
|
std::vector<float> float_vec_attr,
|
|
|
|
|
std::vector<std::string> str_vec_attr) {
|
|
|
|
|
auto grad_x = paddle::Tensor(paddle::PlaceType::kCPU);
|
|
|
|
|
grad_x.reshape(grad_out.shape());
|
|
|
|
|
|
|
|
|
|
PD_DISPATCH_FLOATING_TYPES(grad_out.type(), "assign_cpu_kernel", ([&] {
|
|
|
|
|
assign_cpu_kernel<data_t>(
|
|
|
|
|
grad_out.data<data_t>(),
|
|
|
|
|
grad_x.mutable_data<data_t>(),
|
|
|
|
|
grad_out.size());
|
|
|
|
|
}));
|
|
|
|
|
|
|
|
|
|
CheckAllBackwardAttrs(int_attr, float_vec_attr, str_vec_attr);
|
|
|
|
|
|
|
|
|
|
return {grad_x};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
std::vector<paddle::Tensor> ConstAttrTestForward(
|
|
|
|
|
const paddle::Tensor& x,
|
|
|
|
|
const bool& bool_attr,
|
|
|
|
|
const int& int_attr,
|
|
|
|
|
const float& float_attr,
|
|
|
|
|
const int64_t& int64_attr,
|
|
|
|
|
const std::string& str_attr,
|
|
|
|
|
const std::vector<int>& int_vec_attr,
|
|
|
|
|
const std::vector<float>& float_vec_attr,
|
|
|
|
|
const std::vector<int64_t>& int64_vec_attr,
|
|
|
|
|
const std::vector<std::string>& str_vec_attr) {
|
|
|
|
|
auto out = paddle::Tensor(paddle::PlaceType::kCPU);
|
|
|
|
|
out.reshape(x.shape());
|
|
|
|
|
|
|
|
|
|
PD_DISPATCH_FLOATING_TYPES(
|
|
|
|
|
x.type(), "assign_cpu_kernel", ([&] {
|
|
|
|
|
assign_cpu_kernel<data_t>(
|
|
|
|
|
x.data<data_t>(), out.mutable_data<data_t>(), x.size());
|
|
|
|
|
}));
|
|
|
|
|
|
|
|
|
|
// Check attrs value
|
|
|
|
|
CheckAllForwardAttrs(bool_attr,
|
|
|
|
|
int_attr,
|
|
|
|
|
float_attr,
|
|
|
|
|
int64_attr,
|
|
|
|
|
str_attr,
|
|
|
|
|
int_vec_attr,
|
|
|
|
|
float_vec_attr,
|
|
|
|
|
int64_vec_attr,
|
|
|
|
|
str_vec_attr);
|
|
|
|
|
|
|
|
|
|
return {out};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// The attrs of backward op must be the subset of attrs of forward op
|
|
|
|
|
std::vector<paddle::Tensor> ConstAttrTestBackward(
|
|
|
|
|
const paddle::Tensor& grad_out,
|
|
|
|
|
const int& int_attr,
|
|
|
|
|
const std::vector<float>& float_vec_attr,
|
|
|
|
|
const std::vector<std::string>& str_vec_attr) {
|
|
|
|
|
auto grad_x = paddle::Tensor(paddle::PlaceType::kCPU);
|
|
|
|
|
grad_x.reshape(grad_out.shape());
|
|
|
|
|
|
|
|
|
|
PD_DISPATCH_FLOATING_TYPES(grad_out.type(), "assign_cpu_kernel", ([&] {
|
|
|
|
|
assign_cpu_kernel<data_t>(
|
|
|
|
|
grad_out.data<data_t>(),
|
|
|
|
|
grad_x.mutable_data<data_t>(),
|
|
|
|
|
grad_out.size());
|
|
|
|
|
}));
|
|
|
|
|
|
|
|
|
|
CheckAllBackwardAttrs(int_attr, float_vec_attr, str_vec_attr);
|
|
|
|
|
|
|
|
|
|
return {grad_x};
|
|
|
|
|
}
|
|
|
|
@ -171,3 +252,25 @@ PD_BUILD_GRAD_OP(attr_test)
|
|
|
|
|
"float_vec_attr: std::vector<float>",
|
|
|
|
|
"str_vec_attr: std::vector<std::string>"})
|
|
|
|
|
.SetKernelFn(PD_KERNEL(AttrTestBackward));
|
|
|
|
|
|
|
|
|
|
// Registration of the const-ref attribute test op.
// FIX: previously bound to AttrTestForward, which left ConstAttrTestForward
// (defined above precisely for this op) unused and untested; bind the const
// variant so const-reference attribute kernels are actually exercised.
// Behavior is otherwise identical — both kernels compute the same result.
PD_BUILD_OP(const_attr_test)
    .Inputs({"X"})
    .Outputs({"Out"})
    .Attrs({"bool_attr: bool",
            "int_attr: int",
            "float_attr: float",
            "int64_attr: int64_t",
            "str_attr: std::string",
            "int_vec_attr: std::vector<int>",
            "float_vec_attr: std::vector<float>",
            "int64_vec_attr: std::vector<int64_t>",
            "str_vec_attr: std::vector<std::string>"})
    .SetKernelFn(PD_KERNEL(ConstAttrTestForward));
|
|
|
|
|
|
|
|
|
|
// Registration of the const-ref attribute test backward op.
// FIX: previously bound to AttrTestBackward; bind ConstAttrTestBackward so
// the const-reference attribute backward kernel (defined above and otherwise
// unused) is exercised. Both kernels compute the same identity gradient.
PD_BUILD_GRAD_OP(const_attr_test)
    .Inputs({paddle::Grad("Out")})
    .Outputs({paddle::Grad("X")})
    .Attrs({"int_attr: int",
            "float_vec_attr: std::vector<float>",
            "str_vec_attr: std::vector<std::string>"})
    .SetKernelFn(PD_KERNEL(ConstAttrTestBackward));
|
|
|
|
|