test=develop
revert-14398-imperative
Xin Pan 6 years ago
parent b4db31bad0
commit 0492158da5

@@ -293,7 +293,6 @@ class Variable(core.VarBase):
         if is_new_var:
             self.desc.set_type(type)
         elif self.desc.type() != type:
-            # sys.stderr.write('%s vs %s\n' % (self.desc.type(), type))
             raise ValueError("Variable {0} has been created before. The "
                              "previous type is {1}; the new type is {2}. They"
                              " are not matched".format(self.name,
@@ -358,16 +357,16 @@ class Variable(core.VarBase):
         self.stop_gradient = stop_gradient
         self.is_data = is_data
 
-    def numpy(self):
+    def _numpy(self):
         scope = _imperative_tracer().get_scope(self.block.desc)
         tensor = core.get_variable_tensor(scope, self.desc.name())
         return np.array(tensor)
 
-    def backward(self):
+    def _backward(self):
         scope = _imperative_tracer().get_scope(self.block.desc)
         self._run_backward(scope)
 
-    def grad(self):
+    def _gradient(self):
         return np.array(self._grad())
 
     def __str__(self):
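Note: the rename above changes call sites, not behavior. A short sketch of the mapping, assuming `x` is an imperative-mode Variable returned by a layer call (as in TestImperative further down):

    value = x._numpy()     # previously x.numpy(): tensor contents as a numpy.ndarray
    x._backward()          # previously x.backward(): run backward through the tracer
    grads = x._gradient()  # previously x.grad(): gradient values as a numpy.ndarray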

@@ -35,13 +35,8 @@ class PyLayer(core.Layer):
         var_inputs = []
         for x in inputs:
-            if isinstance(x, np.ndarray):
-                py_var = base.to_variable(x)
-                var_inputs.append(py_var)
-            elif isinstance(x, framework.Variable):
-                var_inputs.append(x)
-            else:
-                raise ValueError("not var or ndarray %s" % type(x))
+            py_var = base.to_variable(x)
+            var_inputs.append(py_var)
         outputs = self.forward(var_inputs)
         return outputs
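Note: dropping the isinstance dispatch here only works if base.to_variable now accepts both numpy arrays and existing Variables. That contract is not part of this hunk, so the sketch below is an assumption about its behavior, not the actual implementation; `_wrap_ndarray` is a hypothetical helper, and `framework` / `np` stand for the modules already imported in this file:

    # Assumed contract of base.to_variable after this change (hypothetical sketch).
    def to_variable(value, block=None):
        if isinstance(value, framework.Variable):
            return value                        # already a Variable: pass through
        elif isinstance(value, np.ndarray):
            return _wrap_ndarray(value, block)  # hypothetical helper that builds a Variable
        else:
            raise ValueError("not var or ndarray %s" % type(value))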

@@ -49,23 +49,8 @@ class LayerHelper(object):
     def startup_program(self):
         return default_startup_program()
 
-    def _np_to_variable(self, x):
-        tensor = core.LoDTensor()
-        tensor.set(x, core.CPUPlace())
-        return Variable(
-            self.main_program.current_block(),
-            type=core.VarDesc.VarType.LOD_TENSOR,
-            name=None,
-            shape=x.shape,
-            dtype=x.dtype)
-
     def to_variable(self, x):
-        if isinstance(x, Variable):
-            return x
-        elif isinstance(x, np.ndarray):
-            return base.to_variable(x, self.main_program.current_block())
-        else:
-            raise ValueError("inputs wrong type %s\n" % x)
+        return base.to_variable(x, self.main_program.current_block())
 
     def append_op(self, *args, **kwargs):
         return self.main_program.current_block().append_op(*args, **kwargs)
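Note: with _np_to_variable removed, LayerHelper no longer builds LoDTensor-backed Variables itself and simply defers to base.to_variable on the current block. An illustrative usage sketch; `helper` is a hypothetical LayerHelper instance, and the pass-through behavior for Variables is assumed from the previous hunk:

    import numpy as np

    data = np.random.random((2, 3)).astype('float32')
    var = helper.to_variable(data)   # ndarray wrapped as a Variable on the current block
    same = helper.to_variable(var)   # assumed to return the Variable unchanged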

@@ -43,9 +43,9 @@ class TestImperative(unittest.TestCase):
         l = MyLayer()
         x = l(np.array([1.0, 2.0, -1.0], dtype=np.float32))[0]
         self.assertIsNotNone(x)
-        sys.stderr.write("%s output: %s\n" % (x, x.numpy()))
-        x.backward()
-        sys.stderr.write("grad %s\n" % l._x_for_debug.grad())
+        sys.stderr.write("%s output: %s\n" % (x, x._numpy()))
+        x._backward()
+        sys.stderr.write("grad %s\n" % l._x_for_debug._gradient())
 
 
 if __name__ == '__main__':
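Note: MyLayer and its _x_for_debug attribute are defined elsewhere in this test file and are not part of the diff. A hypothetical definition consistent with how the test uses them is sketched below; it assumes `import paddle.fluid as fluid` at the top of the file, and the real layer may differ:

    class MyLayer(fluid.imperative.PyLayer):
        def __init__(self):
            super(MyLayer, self).__init__()

        def forward(self, inputs):
            x = fluid.layers.relu(inputs[0])
            self._x_for_debug = x                # kept so the test can read its gradient
            return [fluid.layers.reduce_sum(x)]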
