diff --git a/mindspore/nn/layer/basic.py b/mindspore/nn/layer/basic.py
index 30205b0b0b..d643ecb664 100644
--- a/mindspore/nn/layer/basic.py
+++ b/mindspore/nn/layer/basic.py
@@ -37,11 +37,14 @@ __all__ = ['Dropout', 'Flatten', 'Dense', 'ClipByNorm', 'Norm', 'OneHot', 'Pad',
 
 class L1Regularizer(Cell):
-    """
+    r"""
     Apply l1 regularization to weights
     l1 regularization makes weights sparsity
 
+    .. math::
+        \text{loss}=\lambda * \text{reduce_sum}(\text{abs}(\omega))
+
     Note:
         scale(regularization factor) should be a number which greater than 0
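
The formula added to the docstring can be checked numerically. The sketch below is a minimal NumPy illustration of the documented expression, independent of the MindSpore `L1Regularizer` Cell itself; the `scale` and `weights` values are arbitrary examples.

    import numpy as np

    # Illustration of the documented L1 regularization formula:
    #   loss = lambda * reduce_sum(abs(omega))
    # `scale` plays the role of lambda and, per the Note, must be greater than 0.
    scale = 0.01                                # example regularization factor (assumed value)
    weights = np.array([[-1.0, 2.5],
                        [0.0, -0.5]])           # example weight tensor

    l1_loss = scale * np.sum(np.abs(weights))   # 0.01 * (1.0 + 2.5 + 0.0 + 0.5)
    print(l1_loss)                              # 0.04

Because the penalty grows linearly with each weight's magnitude, minimizing it pushes small weights toward exactly zero, which is why the docstring notes that L1 regularization makes the weights sparse.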