save one name in cross_entropy and softmax_cross_entropy, test=develop (#29074)

* save one name in cross_entropy and softmax_cross_entropy, test=develop

* change the function used in CrossEntropyLoss from softmax_cross_entropy to cross_entropy, test=develop
chajchaj authored 4 years ago, committed by GitHub
parent a5aa4dc7a9
commit dfaf6b5eea

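For context, a minimal usage sketch of the consolidated API after this commit (assuming a PaddlePaddle 2.0-era build; the shapes and values are illustrative, not taken from the diff):

```python
import paddle

# After this change, cross_entropy is the single public name; it fuses the
# softmax computation with the cross entropy loss (logits in, loss out).
logits = paddle.randn([4, 10])                            # [batch, num_classes]
labels = paddle.randint(0, 10, shape=[4], dtype='int64')  # hard class indices

loss = paddle.nn.functional.cross_entropy(logits, labels, reduction='mean')
print(loss.numpy())
```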
@@ -129,7 +129,6 @@ from .loss import binary_cross_entropy_with_logits #DEFINE_ALIAS
 # from .loss import bpr_loss #DEFINE_ALIAS
 # from .loss import center_loss #DEFINE_ALIAS
 #from .loss import cross_entropy #DEFINE_ALIAS
-from .loss import softmax_cross_entropy #DEFINE_ALIAS
 from .loss import cross_entropy #DEFINE_ALIAS
 from .loss import dice_loss #DEFINE_ALIAS
 from .loss import hsigmoid_loss #DEFINE_ALIAS

@@ -42,7 +42,6 @@ __all__ = [
     'binary_cross_entropy',
     'binary_cross_entropy_with_logits',
     'cross_entropy',
-    'softmax_cross_entropy',
     'dice_loss',
     'hsigmoid_loss',
     'kl_div',
@@ -1125,25 +1124,6 @@ def cross_entropy(input,
                   soft_label=False,
                   axis=-1,
                   name=None):
-    return softmax_cross_entropy(
-        input=input,
-        label=label,
-        weight=weight,
-        ignore_index=ignore_index,
-        reduction=reduction,
-        soft_label=soft_label,
-        axis=axis,
-        name=name)
-
-
-def softmax_cross_entropy(input,
-                          label,
-                          weight=None,
-                          ignore_index=-100,
-                          reduction='mean',
-                          soft_label=False,
-                          axis=-1,
-                          name=None):
     """
     This operator implements the cross entropy loss function with softmax. This function
     combines the calculation of the softmax operation and the cross entropy loss function

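The merged function keeps the soft_label path from the old softmax_cross_entropy signature. A hedged sketch of that path (shapes illustrative; behavior assumed from the signature shown above, not verified against this commit):

```python
import paddle
import paddle.nn.functional as F

logits = paddle.randn([4, 10])
# Soft labels: one probability distribution per sample instead of class indices.
soft_labels = F.softmax(paddle.randn([4, 10]), axis=-1)

loss = F.cross_entropy(logits, soft_labels, soft_label=True)
```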
@@ -238,7 +238,7 @@ class CrossEntropyLoss(fluid.dygraph.Layer):
         self.name = name

     def forward(self, input, label):
-        ret = paddle.nn.functional.softmax_cross_entropy(
+        ret = paddle.nn.functional.cross_entropy(
             input,
             label,
             weight=self.weight,

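With this last hunk, the layer form routes through the same functional call. A short sketch of equivalent usage (the reduction argument and data are assumptions for illustration, not from the diff):

```python
import paddle

# paddle.nn.CrossEntropyLoss now delegates to paddle.nn.functional.cross_entropy,
# so the layer and functional spellings compute the same fused loss.
loss_fn = paddle.nn.CrossEntropyLoss(reduction='mean')
logits = paddle.randn([4, 10])
labels = paddle.randint(0, 10, shape=[4], dtype='int64')
print(loss_fn(logits, labels).numpy())
```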