add alias for fluid.initializer.set_global_initializer, alias is nn.initializer.set_global_initializer (#28690)

musl/fix_failed_unittests_in_musl
furnace 5 years ago committed by GitHub
parent f096af83a0
commit caffa85ffe
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -955,7 +955,7 @@ def set_global_initializer(weight_init, bias_init=None):
    After this API is invoked, the global initializer will take effect in subsequent code.
    The model parameters include ``weight`` and ``bias`` . In the framework, they correspond
    to ``paddle.ParamAttr`` , which is inherited from ``paddle.Tensor`` , and is a persistable Variable.
    This API only takes effect for model parameters, not for variables created through apis such as
    :ref:`api_fluid_layers_create_global_var` , :ref:`api_fluid_layers_create_tensor`.
@ -974,27 +974,30 @@ def set_global_initializer(weight_init, bias_init=None):
    Examples:
        .. code-block:: python

            import paddle
            import paddle.nn as nn

            nn.initializer.set_global_initializer(nn.initializer.Uniform(), nn.initializer.Constant())
            x_var = paddle.uniform((2, 4, 8, 8), dtype='float32', min=-1., max=1.)

            # The weight of conv1 is initialized by Uniform
            # The bias of conv1 is initialized by Constant
            conv1 = nn.Conv2D(4, 6, (3, 3))
            y_var1 = conv1(x_var)

            # If set weight_attr/bias_attr too, the global initializer will not take effect
            # The weight of conv2 is initialized by Xavier
            # The bias of conv2 is initialized by Normal
            conv2 = nn.Conv2D(4, 6, (3, 3),
                              weight_attr=nn.initializer.XavierUniform(),
                              bias_attr=nn.initializer.Normal())
            y_var2 = conv2(x_var)

            # Cancel the global initializer in framework; it will take effect in subsequent code
            nn.initializer.set_global_initializer(None)
    """
    check_type(weight_init, 'weight_init', (Initializer, type(None)),
               'set_global_initializer')
    global _global_weight_initializer_

@ -14,6 +14,7 @@
# TODO: define the initializers to create a Parameter in neural network
from ...fluid.initializer import Bilinear  #DEFINE_ALIAS
from ...fluid.initializer import set_global_initializer  #DEFINE_ALIAS

from . import constant
from .constant import Constant  #DEFINE_ALIAS
@ -22,7 +23,7 @@ from . import kaiming
from .kaiming import KaimingNormal  #DEFINE_ALIAS
from .kaiming import KaimingUniform  #DEFINE_ALIAS

__all__ = ['Bilinear', 'set_global_initializer']
__all__ += constant.__all__
__all__ += kaiming.__all__

Loading…
Cancel
Save