From dfaf6b5eea2d99303eef692f888d04324db7b2d4 Mon Sep 17 00:00:00 2001
From: chajchaj <57249073+chajchaj@users.noreply.github.com>
Date: Wed, 25 Nov 2020 16:07:10 +0800
Subject: [PATCH] save one name in cross_entropy and softmax_cross_entropy,
 test=develop (#29074)

* save one name in cross_entropy and softmax_cross_entropy, test=develop

* change used function in CrossEntropy from softmax_cross_entropy to
  cross_entropy, test=develop
---
 python/paddle/nn/functional/__init__.py |  1 -
 python/paddle/nn/functional/loss.py     | 20 --------------------
 python/paddle/nn/layer/loss.py          |  2 +-
 3 files changed, 1 insertion(+), 22 deletions(-)

diff --git a/python/paddle/nn/functional/__init__.py b/python/paddle/nn/functional/__init__.py
index c2d6fce670..cec69d6998 100644
--- a/python/paddle/nn/functional/__init__.py
+++ b/python/paddle/nn/functional/__init__.py
@@ -129,7 +129,6 @@ from .loss import binary_cross_entropy_with_logits  #DEFINE_ALIAS
 # from .loss import bpr_loss  #DEFINE_ALIAS
 # from .loss import center_loss  #DEFINE_ALIAS
 #from .loss import cross_entropy  #DEFINE_ALIAS
-from .loss import softmax_cross_entropy  #DEFINE_ALIAS
 from .loss import cross_entropy  #DEFINE_ALIAS
 from .loss import dice_loss  #DEFINE_ALIAS
 from .loss import hsigmoid_loss  #DEFINE_ALIAS
diff --git a/python/paddle/nn/functional/loss.py b/python/paddle/nn/functional/loss.py
index b3ed491a54..c616f7bd22 100755
--- a/python/paddle/nn/functional/loss.py
+++ b/python/paddle/nn/functional/loss.py
@@ -42,7 +42,6 @@ __all__ = [
     'binary_cross_entropy',
     'binary_cross_entropy_with_logits',
     'cross_entropy',
-    'softmax_cross_entropy',
     'dice_loss',
     'hsigmoid_loss',
     'kl_div',
@@ -1125,25 +1124,6 @@ def cross_entropy(input,
                   soft_label=False,
                   axis=-1,
                   name=None):
-    return softmax_cross_entropy(
-        input=input,
-        label=label,
-        weight=weight,
-        ignore_index=ignore_index,
-        reduction=reduction,
-        soft_label=soft_label,
-        axis=axis,
-        name=name)
-
-
-def softmax_cross_entropy(input,
-                          label,
-                          weight=None,
-                          ignore_index=-100,
-                          reduction='mean',
-                          soft_label=False,
-                          axis=-1,
-                          name=None):
     """
     This operator implements the cross entropy loss function with softmax. This function
     combines the calculation of the softmax operation and the cross entropy loss function
diff --git a/python/paddle/nn/layer/loss.py b/python/paddle/nn/layer/loss.py
index a6d1152adf..5bc33d0f0f 100644
--- a/python/paddle/nn/layer/loss.py
+++ b/python/paddle/nn/layer/loss.py
@@ -238,7 +238,7 @@ class CrossEntropyLoss(fluid.dygraph.Layer):
         self.name = name

     def forward(self, input, label):
-        ret = paddle.nn.functional.softmax_cross_entropy(
+        ret = paddle.nn.functional.cross_entropy(
             input,
             label,
             weight=self.weight,
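
For context, a minimal usage sketch (not part of the patch) of the consolidated API: after this change, paddle.nn.functional.cross_entropy is the single public entry point, and paddle.nn.CrossEntropyLoss delegates to it. This assumes a PaddlePaddle 2.0-era install running in dygraph mode; the tensor shapes are illustrative.

    # Minimal sketch: exercising the one remaining name after this patch.
    import paddle
    import paddle.nn.functional as F

    logits = paddle.randn([4, 10])             # [batch, num_classes]
    labels = paddle.randint(0, 10, shape=[4])  # int64 class indices

    # Functional form: softmax and cross entropy fused in one call.
    loss_fn = F.cross_entropy(logits, labels, reduction='mean')

    # Layer form: CrossEntropyLoss.forward now calls
    # paddle.nn.functional.cross_entropy directly.
    loss_layer = paddle.nn.CrossEntropyLoss()(logits, labels)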