@@ -14,6 +14,7 @@
 from __future__ import print_function
 import os
 import numpy as np
 import paddle.fluid as fluid
 import paddle.fluid.compiler as compiler
@@ -21,10 +22,13 @@ import paddle.fluid.core as core
 import paddle.fluid.layers as layers
 import unittest
+from paddle.fluid import ParamAttr
 from paddle.fluid.framework import Program, grad_var_name
 from paddle.fluid.executor import Executor
 from paddle.fluid.backward import append_backward
+np.random.seed(123)
 os.environ["CPU_NUM"] = "1"
 fluid.core._set_eager_deletion_mode(0.0, 1.0, True)
@@ -71,8 +75,8 @@ class PySimpleRNN2(PyRNNBase):
         super(PySimpleRNN2, self).__init__(input_shape, output_shape)
         seq_len, batch_size, input_dim = input_shape
-        self.W = np.random.normal(size=(input_dim, input_dim)).astype("float32")
-        self.U = np.random.normal(size=(input_dim, input_dim)).astype("float32")
+        self.W = np.ones(shape=(input_dim, input_dim)).astype("float32")
+        self.U = np.zeros(shape=(input_dim, input_dim)).astype("float32")
         self.h_boot = np.ones(shape=(batch_size, input_dim)).astype("float32")
         men_dim = (seq_len, batch_size, input_dim)
@@ -186,7 +190,7 @@ class EagerDeletionRecurrentOpTest1(unittest.TestCase):
                       fetch_list=fetch_list,
                       return_numpy=False)
-    def test_backward(self, rtol=0.1):
+    def test_backward(self, rtol=0.01):
         self.check_forward()
         with fluid.program_guard(self.main_program, self.startup_program):
@@ -208,7 +212,7 @@ class EagerDeletionRecurrentOpTest1(unittest.TestCase):
         pd_output = self.forward()
         py_output = self.py_rnn.forward()
         self.assertEqual(pd_output.shape, py_output.shape)
-        self.assertTrue(np.isclose(pd_output, py_output, rtol=0.1).all())
+        self.assertTrue(np.isclose(pd_output, py_output, rtol=0.01).all())
     def get_numerical_gradient(self, delta=0.005):
         dloss_dout = 1.0
@@ -278,14 +282,20 @@ class EagerDeletionRecurrentOpTest2(EagerDeletionRecurrentOpTest1):
             h_pre = rnn.memory(init=h_boot)
             x_t = rnn.step_input(x)
-            temp_l = layers.fc(input=x_t,
-                               size=self.input_dim,
-                               param_attr='W',
-                               bias_attr=False)
-            temp_r = layers.fc(input=h_pre,
-                               size=self.input_dim,
-                               param_attr='U',
-                               bias_attr=False)
+            temp_l = layers.fc(
+                input=x_t,
+                size=self.input_dim,
+                param_attr=ParamAttr(
+                    name='W',
+                    initializer=fluid.initializer.ConstantInitializer(1.0)),
+                bias_attr=False)
+            temp_r = layers.fc(
+                input=h_pre,
+                size=self.input_dim,
+                param_attr=ParamAttr(
+                    name='U',
+                    initializer=fluid.initializer.ConstantInitializer(0.0)),
+                bias_attr=False)
             h = layers.sigmoid(x=layers.elementwise_add(x=temp_l, y=temp_r))
@@ -295,7 +305,7 @@ class EagerDeletionRecurrentOpTest2(EagerDeletionRecurrentOpTest1):
         return rnn()
     def test_backward(self):
-        super(EagerDeletionRecurrentOpTest2, self).test_backward(rtol=0.2)
+        super(EagerDeletionRecurrentOpTest2, self).test_backward(rtol=0.01)
 class EagerDeletionRecurrentOpMultipleMemoryTest(EagerDeletionRecurrentOpTest1):
@@ -496,7 +506,6 @@ class EagerDeletionTwoRecurrentOpsTest(EagerDeletionRecurrentOpTest1):
             # Second RNN
             pre_mem = np.zeros_like(x) if step_id == 0 else self.mem_1[step_id -
                                                                        1]
-            # print(np.sum(self.rnn_0_output))
             self.mem_1[step_id] = x + np.sum(self.rnn_0_output)
             self.y[step_id] = self.mem_1[step_id] + pre_mem
@@ -675,8 +684,8 @@ class EagerDeletionFarwardOnlyRnnAndBackwardRnnTest(
         self.assertEqual(pd_output.shape, py_output.shape)
         self.assertTrue(
             np.isclose(
-                forward_only_output, py_output, rtol=0.1).all)
-        self.assertTrue(np.isclose(pd_output, py_output, rtol=0.1).all())
+                forward_only_output, py_output, rtol=0.01).all)
+        self.assertTrue(np.isclose(pd_output, py_output, rtol=0.01).all())
 if __name__ == '__main__':
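
Note on the change above: replacing the randomly initialized W and U with constant weights (ones and zeros, matched by ConstantInitializer in the fluid program) makes the Paddle output and the NumPy reference deterministic, which is what allows the tolerance to tighten from rtol=0.1 to rtol=0.01. A minimal sketch of the kind of recurrence the NumPy reference in this test computes, assuming the step h_t = sigmoid(x_t.W + h_{t-1}.U); the helper below is illustrative only and not part of the patch:

import numpy as np

def py_sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def reference_rnn(x, W, U, h_boot):
    # x has shape (seq_len, batch, dim); one sigmoid step per time step,
    # mirroring what the Python reference computes with W=ones, U=zeros.
    h = h_boot
    outs = []
    for x_t in x:
        h = py_sigmoid(np.matmul(x_t, W) + np.matmul(h, U))
        outs.append(h)
    return np.stack(outs)

seq_len, batch, dim = 4, 1, 3
x = np.random.random((seq_len, batch, dim)).astype("float32")
W = np.ones((dim, dim), dtype="float32")   # corresponds to ConstantInitializer(1.0)
U = np.zeros((dim, dim), dtype="float32")  # corresponds to ConstantInitializer(0.0)
h_boot = np.ones((batch, dim), dtype="float32")
print(reference_rnn(x, W, U, h_boot).shape)  # (4, 1, 3)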