@@ -1,6 +1,6 @@
 import unittest
 import numpy as np
-from gradient_checker import GradientChecker, create_op
+from gradient_checker import GradientChecker, Operator
 from op_test_util import OpTestMeta
 
 
@@ -9,19 +9,16 @@ class TestReshapeOp(unittest.TestCase):
 
     def setUp(self):
         self.type = "reshape"
-        self.inputs = {'X': np.random.random((2, 4)).astype("float32"), }
-        print self.inputs
-        self.attrs = {'shape': [4, 2]}
+        self.inputs = {'X': np.random.random((37, 51)).astype("float32"), }
+        self.attrs = {'shape': [51, 37]}
         self.outputs = {'Out': self.inputs['X'].reshape(self.attrs['shape'])}
-        print self.outputs
 
 
 class ReshapeGradOpTest(GradientChecker):
     def test_normal(self):
-        op = create_op("reshape")
-        inputs = {"X": np.random.random((2, 4)).astype("float32")}
-        attrs = {'shape': [4, 2]}
-        self.check_grad(op, inputs, attrs, set("X"), "Out")
+        op = Operator("reshape", X='X', Out='Out', shape=[5, 40])
+        inputs = {"X": np.random.random((10, 20)).astype("float32")}
+        self.check_grad(op, inputs, set("X"), "Out")
 
 
 if __name__ == '__main__':
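
For reference, a minimal standalone sketch of the expected-output computation the updated forward test relies on, using only numpy and the (37, 51) -> [51, 37] shapes taken from the new setUp; the assertions are illustrative and not part of the test file:

import numpy as np

# Same shapes as the updated TestReshapeOp.setUp: a (37, 51) input
# reshaped to [51, 37]; the element count (1887) is unchanged.
x = np.random.random((37, 51)).astype("float32")
out = x.reshape([51, 37])

assert out.shape == (51, 37)
# Reshape only re-indexes the buffer; the flattened data is identical,
# which is what 'Out': self.inputs['X'].reshape(self.attrs['shape'])
# encodes as the expected output.
assert np.array_equal(out.ravel(), x.ravel())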