Fix api for ErrorClipByValue, code demo of clip_by_norm. test=develop (#27654)

* Fix ErrorClipByValue api and demo code of clip_by_norm. test=develop

Co-authored-by: tianshuo78520a <707759223@qq.com>
my_2.0rc
ysh329 5 years ago committed by GitHub
parent 54c368db1e
commit 9cd86487cc
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -12415,12 +12415,17 @@ def clip_by_norm(x, max_norm, name=None):
Examples:
.. code-block:: python
import paddle.fluid as fluid
input = fluid.data(
name='data', shape=[None, 1], dtype='float32')
reward = fluid.layers.clip_by_norm(x=input, max_norm=1.0)
import paddle
import numpy as np
paddle.disable_static()
input = paddle.to_tensor(data=np.array([[0.1, 0.2], [0.3, 0.4]]), dtype="float32")
reward = paddle.nn.clip_by_norm(x=input, max_norm=1.0)
"""
if in_dygraph_mode():
return core.ops.clip_by_norm(x, 'max_norm', max_norm)
helper = LayerHelper("clip_by_norm", **locals())
check_variable_and_dtype(x, 'X', ['float32'], 'clip_by_norm')
check_type(max_norm, 'max_norm', (float), 'clip_by_norm')

@ -31,7 +31,6 @@ __all__ += rnn.__all__
__all__ += weight_norm_hook.__all__
# TODO: define alias in nn directory
# from .clip import ErrorClipByValue #DEFINE_ALIAS
from .clip import GradientClipByGlobalNorm #DEFINE_ALIAS
from .clip import GradientClipByNorm #DEFINE_ALIAS
from .clip import GradientClipByValue #DEFINE_ALIAS

Loading…
Cancel
Save