From 71417617a59c8de5dd91b506876b24dfb44cee03 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E2=80=9Cdangjiaqi1=E2=80=9D?= <“dangjiaqi1@huawei.com”>
Date: Thu, 7 Jan 2021 21:27:47 +0800
Subject: [PATCH] update L1Regularizer annotation

---
 mindspore/nn/layer/basic.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/mindspore/nn/layer/basic.py b/mindspore/nn/layer/basic.py
index 30205b0b0b..d643ecb664 100644
--- a/mindspore/nn/layer/basic.py
+++ b/mindspore/nn/layer/basic.py
@@ -37,11 +37,14 @@ __all__ = ['Dropout', 'Flatten', 'Dense', 'ClipByNorm', 'Norm', 'OneHot', 'Pad',
 class L1Regularizer(Cell):
-    """
+    r"""
     Apply l1 regularization to weights
     l1 regularization makes weights sparsity
 
+    .. math::
+        \text{loss}=\lambda * \text{reduce_sum}(\text{abs}(\omega))
+
     Note:
         scale(regularization factor) should be a number which greater than 0