modify trace api test=develop (#25397)

Author: yaoxuefeng (committed via GitHub)
Parent: f9ac5fb992
Commit: aaa7cbd56f

@@ -30,8 +30,8 @@ class TraceOp : public framework::OperatorWithKernel {
         ctx->HasOutput("Out"), true,
         platform::errors::NotFound("Output of TraceOp is not found."));
-    int dim1 = ctx->Attrs().Get<int>("dim1");
-    int dim2 = ctx->Attrs().Get<int>("dim2");
+    int dim1 = ctx->Attrs().Get<int>("axis1");
+    int dim2 = ctx->Attrs().Get<int>("axis2");
     auto x_dims = ctx->GetInputDim("Input");
@@ -84,15 +84,15 @@ class TraceOpMaker : public framework::OpProtoAndCheckerMaker {
 )DOC")
         .SetDefault(0);
     AddAttr<int>(
-        "dim1",
-        R"DOC((int, default 0), the first dim of the 2-D planes from which the diagonals should be taken.
-        Can be both positive and negative. Default: 0.
+        "axis1",
+        R"DOC((int, default 0), the first axis of the 2-D planes from which the diagonals should be taken.
+        Can be either positive or negative. Default: 0.
 )DOC")
         .SetDefault(-2);
     AddAttr<int>(
-        "dim2",
-        R"DOC((int, default 1), the second dim of the 2-D planes from which the diagonals should be taken.
-        Can be both positive and negative. Default: 1.
+        "axis2",
+        R"DOC((int, default 1), the second axis of the 2-D planes from which the diagonals should be taken.
+        Can be either positive or negative. Default: 1.
 )DOC")
         .SetDefault(-1);
     AddComment(R"DOC(

@@ -33,8 +33,8 @@ class TraceCUDAKernel : public framework::OpKernel<T> {
     auto* out = context.Output<framework::Tensor>("Out");
     const int64_t offset = context.Attr<int>("offset");
-    const int64_t dim1 = context.Attr<int>("dim1");
-    const int64_t dim2 = context.Attr<int>("dim2");
+    const int64_t dim1 = context.Attr<int>("axis1");
+    const int64_t dim2 = context.Attr<int>("axis2");
     T* out_data = out->mutable_data<T>(context.GetPlace());
     const framework::Tensor diag =

@@ -174,8 +174,8 @@ class TraceKernel : public framework::OpKernel<T> {
     auto* out = context.Output<framework::Tensor>("Out");
     const int64_t offset = context.Attr<int>("offset");
-    const int64_t dim1 = context.Attr<int>("dim1");
-    const int64_t dim2 = context.Attr<int>("dim2");
+    const int64_t dim1 = context.Attr<int>("axis1");
+    const int64_t dim2 = context.Attr<int>("axis2");
     auto output_dims = out->dims();
@@ -205,8 +205,8 @@ class TraceGradKernel : public framework::OpKernel<T> {
         context.Output<framework::Tensor>(framework::GradVarName("Input"));
     int64_t offset = context.Attr<int>("offset");
-    int64_t dim1 = context.Attr<int>("dim1");
-    int64_t dim2 = context.Attr<int>("dim2");
+    int64_t dim1 = context.Attr<int>("axis1");
+    int64_t dim2 = context.Attr<int>("axis2");
     auto input_dims = d_x->dims();
     auto input_stride = framework::stride(input_dims);

@@ -33,7 +33,7 @@ class TestComplexTraceLayer(unittest.TestCase):
         for place in self._places:
             with dg.guard(place):
                 var_x = dg.to_variable(input)
-                result = cpx.trace(var_x, offset=1, dim1=0, dim2=2).numpy()
+                result = cpx.trace(var_x, offset=1, axis1=0, axis2=2).numpy()
                 target = np.trace(input, offset=1, axis1=0, axis2=2)
                 self.assertTrue(np.allclose(result, target))

@@ -38,7 +38,7 @@ class TestTraceOp(OpTest):
     def init_config(self):
         self.case = np.random.randn(20, 6).astype('float64')
         self.inputs = {'Input': self.case}
-        self.attrs = {'offset': 0, 'dim1': 0, 'dim2': 1}
+        self.attrs = {'offset': 0, 'axis1': 0, 'axis2': 1}
         self.target = np.trace(self.inputs['Input'])
@@ -46,24 +46,24 @@ class TestTraceOpCase1(TestTraceOp):
     def init_config(self):
         self.case = np.random.randn(2, 20, 2, 3).astype('float32')
         self.inputs = {'Input': self.case}
-        self.attrs = {'offset': 1, 'dim1': 0, 'dim2': 2}
+        self.attrs = {'offset': 1, 'axis1': 0, 'axis2': 2}
         self.target = np.trace(
             self.inputs['Input'],
             offset=self.attrs['offset'],
-            axis1=self.attrs['dim1'],
-            axis2=self.attrs['dim2'])
+            axis1=self.attrs['axis1'],
+            axis2=self.attrs['axis2'])
 class TestTraceOpCase2(TestTraceOp):
     def init_config(self):
         self.case = np.random.randn(2, 20, 2, 3).astype('float32')
         self.inputs = {'Input': self.case}
-        self.attrs = {'offset': -5, 'dim1': 1, 'dim2': -1}
+        self.attrs = {'offset': -5, 'axis1': 1, 'axis2': -1}
         self.target = np.trace(
             self.inputs['Input'],
             offset=self.attrs['offset'],
-            axis1=self.attrs['dim1'],
-            axis2=self.attrs['dim2'])
+            axis1=self.attrs['axis1'],
+            axis2=self.attrs['axis2'])
 class TestTraceAPICase(unittest.TestCase):
@@ -71,7 +71,7 @@ class TestTraceAPICase(unittest.TestCase):
         case = np.random.randn(2, 20, 2, 3).astype('float32')
         data1 = fluid.data(name='data1', shape=[2, 20, 2, 3], dtype='float32')
         out1 = tensor.trace(data1)
-        out2 = tensor.trace(data1, offset=-5, dim1=1, dim2=-1)
+        out2 = tensor.trace(data1, offset=-5, axis1=1, axis2=-1)
         place = core.CPUPlace()
         exe = fluid.Executor(place)

@@ -236,39 +236,38 @@ def elementwise_div(x, y, axis=-1, name=None):
         name=name)
-def trace(input, offset=0, dim1=0, dim2=1, name=None):
+def trace(x, offset=0, axis1=0, axis2=1, name=None):
     """
-    The layer to compute the trace for a complex number tensor. input :attr:`input` must be a ComplexVariable.
+    The layer to compute the trace for a complex number tensor. x :attr:`x` must be a ComplexVariable.
     See the detailed description for the function and other arguments
     in :ref:`api_tensor_math_trace` .
     Args:
-        input(ComplexVariable): The input ComplexVariable. Must be at least 2-dimensional.
+        x(ComplexVariable): The input ComplexVariable x. Must be at least 2-dimensional.
             The supported data types include complex64 and complex128.
-        offset(int, optional): Which diagonals in input tensor will be taken. Default: 0 (main diagonals).
-        dim1(int, optional): The first dimension with respect to take diagonal. Default: 0.
-        dim2(int, optional): The second dimension with respect to take diagonal. Default: 1.
+        offset(int, optional): Which diagonals in input tensor x will be taken. Default: 0 (main diagonals).
+        axis1(int, optional): The first axis with respect to take diagonal. Default: 0.
+        axis2(int, optional): The second axis with respect to take diagonal. Default: 1.
         name (str, optional): Normally there is no need for user to set this property. For more information, please refer to :ref:`api_guide_Name`. Default: None.
     Returns:
-        ComplexVariable: The trace result of input tensor, it's data type is the same as input data type.
+        ComplexVariable: The trace result of input tensor x, it's data type is the same as input data type.
     Examples:
         .. code-block:: python
            import paddle
-           import paddle.fluid.dygraph as dg
           import numpy as np
            case1 = np.random.randn(3, 10, 10).astype('float64') + 1j * np.random.randn(3, 10, 10).astype('float64')
-           with dg.guard():
-               case1 = dg.to_variable(case1)
-               data1 = paddle.complex.trace(case1, offset=1, dim1=1, dim2=2) # data1.shape = [3]
+           paddle.enable_imperative()
+           case1 = paddle.imperative.to_variable(case1)
+           data1 = paddle.complex.trace(case1, offset=1, axis1=1, axis2=2) # data1.shape = [3]
     """
-    complex_variable_exists([input], "trace")
-    real = math.trace(input.real, offset, dim1, dim2, name)
-    imag = math.trace(input.imag, offset, dim1, dim2, name)
+    complex_variable_exists([x], "trace")
+    real = math.trace(x.real, offset, axis1, axis2, name)
+    imag = math.trace(x.imag, offset, axis1, axis2, name)
     return ComplexVariable(real, imag)
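For the complex variant, the updated TestComplexTraceLayer test earlier in this diff checks that the renamed call matches numpy.trace applied to the complex array directly. A rough usage sketch along the same lines, assuming the dygraph guard from that test and that paddle.complex is imported as cpx (as the test's alias suggests):

import numpy as np
import paddle.fluid.dygraph as dg
import paddle.complex as cpx

# Complex input: trace is taken over the last two axes of each 10x10 plane.
case = np.random.randn(3, 10, 10) + 1j * np.random.randn(3, 10, 10)
with dg.guard():
    var_x = dg.to_variable(case)
    result = cpx.trace(var_x, offset=1, axis1=1, axis2=2).numpy()  # shape [3]
target = np.trace(case, offset=1, axis1=1, axis2=2)
print(np.allclose(result, target))  # expected: True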

File diff suppressed because it is too large.