@@ -25,18 +25,19 @@ class PadConstantLikeOp : public framework::OperatorWithKernel {
   using framework::OperatorWithKernel::OperatorWithKernel;
 
   void InferShape(framework::InferShapeContext *ctx) const override {
-    PADDLE_ENFORCE(ctx->HasInput("X"),
-                   "Input(X) of PadConstantLikeOp should not be null.");
-    PADDLE_ENFORCE(ctx->HasInput("Y"),
-                   "Input(Y) of PadConstantLikeOp should not be null.");
-    PADDLE_ENFORCE(ctx->HasOutput("Out"),
-                   "Output(Out) of PadConstantLikeOp should not be null.");
+    OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X", "PadConstantLike");
+    OP_INOUT_CHECK(ctx->HasInput("Y"), "Input", "Y", "PadConstantLike");
+    OP_INOUT_CHECK(ctx->HasOutput("Out"), "Output", "Out", "PadConstantLike");
 
     auto x_dim = ctx->GetInputDim("X");
     auto y_dim = ctx->GetInputDim("Y");
 
     PADDLE_ENFORCE_EQ(x_dim.size(), y_dim.size(),
-                      "The dimension of X and Y should be the same.");
+                      platform::errors::InvalidArgument(
+                          "The size of Input(X)'s dimension and the size of "
+                          "Input(Y)'s dimension should be the same, but "
+                          "received %d for Input(X) vs %d for Input(Y).",
+                          x_dim.size(), y_dim.size()));
 
     for (int i = 0; i < x_dim.size(); ++i) {
       if ((!ctx->IsRuntime()) && ((x_dim[i] == -1) || (y_dim[i] == -1))) {
@@ -44,8 +45,11 @@ class PadConstantLikeOp : public framework::OperatorWithKernel {
       } else {
         PADDLE_ENFORCE_GE(
             x_dim[i], y_dim[i],
-            "expected X_dim[i] >= Y_dim[i], but received %d < %d for dim %d",
-            x_dim[i], y_dim[i], i);
+            platform::errors::InvalidArgument(
+                "The size of each dimension of Input(X) expected to be greater "
+                "than or equal to size of corresponding dimension of Input(Y) "
+                "(X_dim[i] >= Y_dim[i]), but received %d < %d for dimension %d",
+                x_dim[i], y_dim[i], i));
       }
     }
 
@@ -157,14 +161,20 @@ class PadConstantLikeOpGrad : public framework::OperatorWithKernel {
   using framework::OperatorWithKernel::OperatorWithKernel;
 
   void InferShape(framework::InferShapeContext *ctx) const override {
-    PADDLE_ENFORCE(ctx->HasInput("Y"), "Input(Y) should not be null");
-    PADDLE_ENFORCE(ctx->HasInput(framework::GradVarName("Out")),
-                   "Input(Out@GRAD) should not be null");
+    OP_INOUT_CHECK(ctx->HasInput("Y"), "Input", "Y", "PadConstantLike@Grad");
+    OP_INOUT_CHECK(ctx->HasInput(framework::GradVarName("Out")), "Input",
+                   framework::GradVarName("Out"), "PadConstantLike@Grad");
+
     auto y_dim = ctx->GetInputDim("Y");
     auto dout_dim = ctx->GetInputDim(framework::GradVarName("Out"));
 
-    PADDLE_ENFORCE_EQ(dout_dim.size(), y_dim.size(),
-                      "The dimension of X and Y should be the same.");
+    PADDLE_ENFORCE_EQ(
+        dout_dim.size(), y_dim.size(),
+        platform::errors::InvalidArgument(
+            "Op(PadConstantLike@Grad) the size of Input(Out@Grad)'s dimension "
+            "and the size of Input(Y)'s dimension should be the same, but "
+            "received %d for Input(Out@Grad) vs %d for Input(Y).",
+            dout_dim.size(), y_dim.size()));
 
     auto y_grad_name = framework::GradVarName("Y");
     if (ctx->HasOutput(y_grad_name)) {
@@ -175,10 +185,14 @@ class PadConstantLikeOpGrad : public framework::OperatorWithKernel {
         if ((!ctx->IsRuntime()) && ((dout_dim[i] == -1) || (y_dim[i] == -1))) {
           continue;
         } else {
-          PADDLE_ENFORCE_GE(dout_dim[i], y_dim[i],
-                            "expected Out_dim[i] >= Y_dim[i], but received %d "
-                            "< %d for dim %d",
-                            dout_dim[i], y_dim[i], i);
+          PADDLE_ENFORCE_GE(
+              dout_dim[i], y_dim[i],
+              platform::errors::InvalidArgument(
+                  "The size of each dimension of Input(Out@Grad) expected to "
+                  "be greater than or equal to size of corresponding dimension "
+                  "of Input(Y) (Out_dim[i] >= Y_dim[i]), but received %d < %d "
+                  "for dimension %d",
+                  dout_dim[i], y_dim[i], i));
         }
       }
     }
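For context only: below is a minimal, framework-free C++ sketch of the shape rules the forward-op hunks validate (rank equality and per-dimension X_dim[i] >= Y_dim[i], skipping -1 "unknown" dims at graph-build time); the grad hunks apply the same per-dimension rule to Out@Grad vs Y. The function name, error wording, and sample shapes are illustrative assumptions, not Paddle APIs and not part of this patch.

// Framework-free sketch (not part of this patch) of the checks above:
// rank(X) == rank(Y) and X_dim[i] >= Y_dim[i], with -1 meaning "unknown
// at compile time" and therefore skipped.
#include <cstdio>
#include <stdexcept>
#include <string>
#include <vector>

void CheckPadConstantLikeShapes(const std::vector<long long> &x_dim,
                                const std::vector<long long> &y_dim) {
  if (x_dim.size() != y_dim.size()) {
    throw std::invalid_argument(
        "The size of Input(X)'s dimension and the size of Input(Y)'s "
        "dimension should be the same, but received " +
        std::to_string(x_dim.size()) + " vs " + std::to_string(y_dim.size()));
  }
  for (size_t i = 0; i < x_dim.size(); ++i) {
    if (x_dim[i] == -1 || y_dim[i] == -1) continue;  // unknown dim, skip
    if (x_dim[i] < y_dim[i]) {
      throw std::invalid_argument(
          "expected X_dim[i] >= Y_dim[i], but received " +
          std::to_string(x_dim[i]) + " < " + std::to_string(y_dim[i]) +
          " for dimension " + std::to_string(i));
    }
  }
}

int main() {
  CheckPadConstantLikeShapes({4, 8}, {2, 8});  // passes: 4 >= 2, 8 >= 8
  try {
    CheckPadConstantLikeShapes({2, 8}, {4, 8});  // fails on dimension 0
  } catch (const std::invalid_argument &e) {
    std::printf("%s\n", e.what());
  }
  return 0;
}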