!1329 Put the parameter validation of the Summary operator into the python layer

Merge pull request !1329 from ougongchang/master
pull/1329/MERGE
mindspore-ci-bot 6 years ago committed by Gitee
commit 45368a86e9

@@ -57,9 +57,6 @@ std::string CNode::fullname_with_scope() {
   if (IsApply(prim::kPrimScalarSummary) || IsApply(prim::kPrimTensorSummary) || IsApply(prim::kPrimImageSummary) ||
       IsApply(prim::kPrimHistogramSummary)) {
     std::string tag = GetValue<std::string>(GetValueNode(input(1)));
-    if (tag == "") {
-      MS_LOG(EXCEPTION) << "The tag name is null, should be valid string";
-    }
     std::string name;
     if (IsApply(prim::kPrimScalarSummary)) {
       name = tag + "[:Scalar]";
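With this guard gone, an empty summary tag can no longer reach fullname_with_scope(): the Python layer now rejects it during type inference. A minimal sketch of a call that now fails early (the net here is hypothetical, not part of the change; GRAPH_MODE assumed):

import numpy as np
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P

context.set_context(mode=context.GRAPH_MODE)

class Net(nn.Cell):
    """Hypothetical net passing an empty summary name."""
    def __init__(self):
        super(Net, self).__init__()
        self.summary = P.ScalarSummary()

    def construct(self, x):
        self.summary("", x)  # empty name -> ValueError from _check_summary_param
        return x

Net()(Tensor(np.array([1.0]).astype(np.float32)))  # raises before graph execution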

@@ -21,64 +21,5 @@
 #include "utils/symbolic.h"

 namespace mindspore {
-namespace abstract {
-AbstractBasePtr InferImplScalarSummary(const AnalysisEnginePtr &, const PrimitivePtr &primitive,
-                                       const AbstractBasePtrList &args_spec_list) {
-  // Inputs: a scalar and a tensor or scalar.
-  const std::string op_name = primitive->name();
-  CheckArgsSize(op_name, args_spec_list, 2);
-
-  // check the tag
-  AbstractScalarPtr descriptions = CheckArg<AbstractScalar>(op_name, args_spec_list, 0);
-
-  // check the value: scalar or shape = (1,)
-  auto scalar_value = dyn_cast<AbstractScalar>(args_spec_list[1]);
-  if (scalar_value == nullptr) {
-    auto tensor_value = dyn_cast<AbstractTensor>(args_spec_list[1]);
-    if (tensor_value == nullptr) {
-      MS_LOG(EXCEPTION) << "Input must be scalar or shape(1,)";
-    }
-  } else {
-    auto item_v = scalar_value->BuildValue();
-    if (item_v->isa<StringImm>()) {
-      auto value = item_v->cast<StringImmPtr>()->value();
-      if (value.empty()) {
-        MS_LOG(EXCEPTION) << "Input summary value can't be null";
-      }
-    }
-  }
-
-  // Removed the force check to support setting summaries in batch with a 'for' loop
-  auto item_v = descriptions->BuildValue();
-  if (!item_v->isa<StringImm>()) {
-    MS_EXCEPTION(TypeError) << "Summary first parameter should be string";
-  }
-
-  return std::make_shared<AbstractScalar>(kAnyValue, kBool);
-}
-
-AbstractBasePtr InferImplTensorSummary(const AnalysisEnginePtr &, const PrimitivePtr &primitive,
-                                       const AbstractBasePtrList &args_spec_list) {
-  // Inputs: a scalar(tag) and a tensor(value)
-  const std::string op_name = primitive->name();
-  CheckArgsSize(op_name, args_spec_list, 2);
-
-  // check the tag
-  auto descriptions = CheckArg<AbstractScalar>(op_name, args_spec_list, 0);
-  auto tensor_value = CheckArg<AbstractTensor>(op_name, args_spec_list, 1);
-
-  int tensor_rank = SizeToInt(tensor_value->shape()->shape().size());
-  if (tensor_rank == 0) {
-    MS_LOG(EXCEPTION) << op_name << " summary evaluator's second arg should be a tensor, but got a scalar of rank 0";
-  }
-
-  // Removed the force check to support setting summaries in batch with a 'for' loop
-  auto item_v = descriptions->BuildValue();
-  if (!item_v->isa<StringImm>()) {
-    MS_EXCEPTION(TypeError) << "Summary first parameter should be string";
-  }
-
-  return std::make_shared<AbstractScalar>(kAnyValue, std::make_shared<Bool>());
-}
-}  // namespace abstract
+namespace abstract {}  // namespace abstract
 }  // namespace mindspore

@@ -128,11 +128,6 @@ PrimitiveEvalImplMap &GetPrimitiveToEvalImplMap() {
     {prim::kPrimDepend, {InferImplDepend, true}},
     {prim::kPrimBroadcastGradientArgs, {InferImplBroadcastGradientArgs, false}},
     {prim::kPrimControlDepend, {InferImplControlDepend, true}},
-    // Debug
-    {prim::kPrimScalarSummary, {InferImplScalarSummary, true}},
-    {prim::kPrimImageSummary, {InferImplTensorSummary, true}},
-    {prim::kPrimTensorSummary, {InferImplTensorSummary, true}},
-    {prim::kPrimHistogramSummary, {InferImplTensorSummary, true}},
   };
   return prim_eval_implement_map;
 }

@@ -326,11 +326,6 @@ AbstractBasePtr InferImplBroadcastGradientArgs(const AnalysisEnginePtr &, const
                                                const AbstractBasePtrList &args_spec_list);
 AbstractBasePtr InferImplControlDepend(const AnalysisEnginePtr &, const PrimitivePtr &primitive,
                                        const AbstractBasePtrList &args_spec_list);
-AbstractBasePtr InferImplScalarSummary(const AnalysisEnginePtr &, const PrimitivePtr &primitive,
-                                       const AbstractBasePtrList &args_spec_list);
-AbstractBasePtr InferImplTensorSummary(const AnalysisEnginePtr &, const PrimitivePtr &primitive,
-                                       const AbstractBasePtrList &args_spec_list);
 }  // namespace abstract
 }  // namespace mindspore

@@ -16,10 +16,22 @@
 """debug_ops"""
 from ..._checkparam import Validator as validator
 from ...common import dtype as mstype
-from ..primitive import Primitive, prim_attr_register, PrimitiveWithInfer
+from ..primitive import prim_attr_register, PrimitiveWithInfer


-class ScalarSummary(Primitive):
+def _check_summary_param(name, value, class_name):
+    """Check whether the name and value are valid for a summary op."""
+    n_type = name['dtype']
+    n_value = name['value']
+    validator.check_value_type('name', n_type, [type(mstype.string)], class_name)
+    if not n_value:
+        raise ValueError(f"For 'name' the value should be a valid string in {class_name}, but got {n_value}.")
+
+    v_type = value['dtype']
+    validator.check_value_type('value', v_type, [type(mstype.tensor)], class_name)
+
+
+class ScalarSummary(PrimitiveWithInfer):
     """
     Output scalar to protocol buffer through scalar summary operator.
@@ -45,11 +57,19 @@ class ScalarSummary(Primitive):
     def __init__(self):
         """init"""

-    def __call__(self, *args, **kwargs):
-        pass
+    def __infer__(self, name, value):
+        _check_summary_param(name, value, self.__class__.__name__)
+
+        v_shape = value['shape']
+        # In the summary, a value whose shape is [1] is also considered a scalar.
+        if v_shape and v_shape != [1]:
+            raise ValueError(f"For 'value' the type should be scalar, "
+                             f"shape should be [] or [1] in {self.__class__.__name__}, but got {v_shape}.")
+
+        return value


-class ImageSummary(Primitive):
+class ImageSummary(PrimitiveWithInfer):
     """
     Output image tensor to protocol buffer through image summary operator.
@@ -73,11 +93,20 @@ class ImageSummary(Primitive):
     def __init__(self):
         """init"""

-    def __call__(self, *args, **kwargs):
-        pass
+    def __infer__(self, name, value):
+        _check_summary_param(name, value, self.__class__.__name__)
+
+        # The shape dim of image should be 4.
+        v_shape = value['shape']
+        image_dim = 4
+        if len(v_shape) != image_dim:
+            raise ValueError(f"For 'value' the dim should be {image_dim} in {self.__class__.__name__},"
+                             f" but got {len(v_shape)}.")
+
+        return value


-class TensorSummary(Primitive):
+class TensorSummary(PrimitiveWithInfer):
     """
     Output tensor to protocol buffer through tensor summary operator.
@@ -103,11 +132,19 @@ class TensorSummary(Primitive):
     def __init__(self):
         """init"""

-    def __call__(self, *args, **kwargs):
-        pass
+    def __infer__(self, name, value):
+        _check_summary_param(name, value, self.__class__.__name__)
+
+        v_shape = value['shape']
+        # In the summary, a value whose shape is [] is not considered a tensor.
+        if not v_shape:
+            raise ValueError(f"For 'value' the type should be tensor in {self.__class__.__name__}, "
+                             f"shape should not be [].")
+
+        return value


-class HistogramSummary(Primitive):
+class HistogramSummary(PrimitiveWithInfer):
     """
     Output tensor to protocol buffer through histogram summary operator.
@@ -133,6 +170,17 @@ class HistogramSummary(Primitive):
     def __init__(self):
         """init"""

+    def __infer__(self, name, value):
+        _check_summary_param(name, value, self.__class__.__name__)
+
+        v_shape = value['shape']
+        # In the summary, the histogram value should be a tensor whose shape is not [].
+        if not v_shape:
+            raise ValueError(f"For 'value' the type should be tensor in {self.__class__.__name__}, "
+                             f"shape should not be [].")
+
+        return value
+

 class InsertGradientOf(PrimitiveWithInfer):
     """

@@ -12,11 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # ============================================================================
+"""Summary gpu st."""
 import os
 import random
+import tempfile
 import shutil
-import pytest
 import numpy as np
+import pytest

 import mindspore.context as context
 import mindspore.nn as nn
@@ -26,36 +29,9 @@ from mindspore.train.summary.summary_record import SummaryRecord
 context.set_context(mode=context.GRAPH_MODE, device_target="GPU")

-CUR_DIR = os.getcwd()
-SUMMARY_DIR_ME = CUR_DIR + "/test_me_summary_event_file/"
-SUMMARY_DIR_ME_TEMP = CUR_DIR + "/test_me_temp_summary_event_file/"
-
-
-def clean_environment_file(srcDir):
-    if os.path.exists(srcDir):
-        ls = os.listdir(srcDir)
-        for line in ls:
-            filePath = os.path.join(srcDir, line)
-            os.remove(filePath)
-        os.removedirs(srcDir)
-
-
-def save_summary_events_file(srcDir, desDir):
-    if not os.path.exists(desDir):
-        print("-- create desDir")
-        os.makedirs(desDir)
-
-    ls = os.listdir(srcDir)
-    for line in ls:
-        filePath = os.path.join(srcDir, line)
-        if os.path.isfile(filePath):
-            print("-- move events file : {}".format(filePath))
-            shutil.copy(filePath, desDir)
-            os.remove(filePath)
-    os.removedirs(srcDir)
-

 class SummaryNet(nn.Cell):
+    """Summary net."""
+
     def __init__(self, tag_tuple=None, scalar=1):
         super(SummaryNet, self).__init__()
         self.summary_s = P.ScalarSummary()
@@ -66,8 +42,9 @@ class SummaryNet(nn.Cell):
         self.tag_tuple = tag_tuple
         self.scalar = scalar

-    def construct(self, x, y):
-        self.summary_i("image", x)
+    def construct(self, x, y, image):
+        """Run summary net."""
+        self.summary_i("image", image)
         self.summary_s("x1", x)
         z = self.add(x, y)
         self.summary_t("z1", z)
@@ -75,32 +52,38 @@
         return z


-def train_summary_record_scalar_for_1(test_writer, steps):
+def train_summary_record(test_writer, steps):
+    """Train and record summary."""
     net = SummaryNet()
     out_me_dict = {}
     for i in range(0, steps):
         x = Tensor(np.array([1.1 + random.uniform(1, 10)]).astype(np.float32))
         y = Tensor(np.array([1.2 + random.uniform(1, 10)]).astype(np.float32))
-        out_put = net(x, y)
+        image = Tensor(np.array([[[[1.2]]]]).astype(np.float32))
+        out_put = net(x, y, image)
         test_writer.record(i)
-        print("-----------------output: %s-------------\n", out_put.asnumpy())
         out_me_dict[i] = out_put.asnumpy()
     return out_me_dict


-def me_scalar_summary(steps):
-    with SummaryRecord(SUMMARY_DIR_ME_TEMP) as test_writer:
-        out_me_dict = train_summary_record_scalar_for_1(test_writer, steps)
-        return out_me_dict
+class TestGpuSummary:
+    """Test Gpu summary."""
+
+    summary_dir = tempfile.mkdtemp(suffix='_gpu_summary')
+
+    def setup_method(self):
+        """Run before each test method."""
+        if not os.path.exists(self.summary_dir):
+            os.mkdir(self.summary_dir)
+
+    def teardown_method(self):
+        """Run after each test method."""
+        if os.path.exists(self.summary_dir):
+            shutil.rmtree(self.summary_dir)

-@pytest.mark.level0
-@pytest.mark.platform_x86_gpu_training
-@pytest.mark.env_onecard
-def test_scalarsummary_scalar1_step10_summaryrecord1():
-    clean_environment_file(SUMMARY_DIR_ME_TEMP)
-    output_dict = me_scalar_summary(10)
-    print("test_scalarsummary_scalar1_step10_summaryrecord1 \n", output_dict)
-    save_summary_events_file(SUMMARY_DIR_ME_TEMP, SUMMARY_DIR_ME)
-    clean_environment_file(SUMMARY_DIR_ME)
+    @pytest.mark.level0
+    @pytest.mark.platform_x86_gpu_training
+    @pytest.mark.env_onecard
+    def test_summary_step10_summaryrecord1(self):
+        """Test recording 10 steps of summary data."""
+        with SummaryRecord(self.summary_dir) as test_writer:
+            train_summary_record(test_writer, steps=10)
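Outside of pytest, the same flow can be smoke-tested by hand; a sketch reusing the helpers above (the directory suffix is arbitrary):

import os
import shutil
import tempfile

summary_dir = tempfile.mkdtemp(suffix='_gpu_summary')
try:
    with SummaryRecord(summary_dir) as test_writer:
        train_summary_record(test_writer, steps=2)
    assert os.listdir(summary_dir)  # at least one event file should be written
finally:
    shutil.rmtree(summary_dir)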

@@ -492,7 +492,7 @@ test_cases = [
     }),
     ('ScalarSummary', {
         'block': ScalarSummaryNet(),
-        'desc_inputs': [2.2],
+        'desc_inputs': [Tensor(2.2)],
     }),
     ('L2Normalize', {
         'block': L2NormalizeNet(),
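The one-line change above follows from the new __infer__ check: ScalarSummary now requires a tensor-typed value with shape [] or [1], so a bare Python float no longer passes inference. A sketch of the distinction (ScalarSummaryNet is the test helper already referenced in this file):

net = ScalarSummaryNet()
net(2.2)          # fails validation: the value's dtype is not a tensor type
net(Tensor(2.2))  # passes: a 0-d tensor is accepted as a scalar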

@@ -112,7 +112,7 @@ def test_InsertGradientOf_3():
     def f(x, y):
         return C.grad_all(debug_test)(x, y)

-    print("debug_gradient:", f(1, 2))
+    print("debug_gradient:", f(Tensor(1.0), Tensor(2.0)))


 def test_print_shape_type():
