From 1de9b60acee0c7c6ea455d36905455b56432c4ef Mon Sep 17 00:00:00 2001
From: xuezhong
Date: Mon, 11 Feb 2019 16:36:01 +0000
Subject: [PATCH] pass layer test test=develop

---
 python/paddle/fluid/layers/nn.py                   | 2 +-
 python/paddle/fluid/tests/unittests/test_layers.py | 5 +++--
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index e1387cec1d..16514fc214 100644
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -5878,7 +5878,7 @@ def sampled_softmax_with_cross_entropy(logits,
             'ignore_index': False,
             'numeric_stable_mode': False
         })
-    return outputs / num_true
+    return loss / num_true


 def smooth_l1(x, y, inside_weight=None, outside_weight=None, sigma=None):
diff --git a/python/paddle/fluid/tests/unittests/test_layers.py b/python/paddle/fluid/tests/unittests/test_layers.py
index b73a2fb866..30194f8cac 100644
--- a/python/paddle/fluid/tests/unittests/test_layers.py
+++ b/python/paddle/fluid/tests/unittests/test_layers.py
@@ -378,9 +378,10 @@ class TestBook(unittest.TestCase):
         program = Program()
         with program_guard(program):
             logits = layers.data(name='Logits', shape=[256], dtype='float64')
-            label = layers.data(name='Label', shape=[5], dtype='int64')
+            label = layers.data(name='Label', shape=[1], dtype='int64')
             num_samples = 25
-            output = layers.sample_logits(logits, label, num_samples)
+            output = layers.sampled_softmax_with_cross_entropy(logits, label,
+                                                               num_samples)
             self.assertIsNotNone(output)
             print(str(program))
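
Note (not part of the patch): the snippet below is a minimal usage sketch derived from the updated unit test in this patch. It assumes the pre-2.0 paddle.fluid API, in which sampled_softmax_with_cross_entropy is exposed under fluid.layers; the data names, shapes, and num_samples value simply mirror the patched test and are illustrative only.

    import paddle.fluid.layers as layers
    from paddle.fluid.framework import Program, program_guard

    program = Program()
    with program_guard(program):
        # One 256-dim logit vector per example and one integer class label
        # per example (shape [1], matching the patched test).
        logits = layers.data(name='Logits', shape=[256], dtype='float64')
        label = layers.data(name='Label', shape=[1], dtype='int64')
        num_samples = 25
        # As of this patch the layer returns loss / num_true, i.e. the
        # sampled softmax cross-entropy loss per example.
        loss = layers.sampled_softmax_with_cross_entropy(logits, label,
                                                         num_samples)
        print(str(program))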